8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classFileParser.hpp"
27 #include "classfile/fieldLayoutBuilder.hpp"
28 #include "jvm.h"
29 #include "memory/resourceArea.hpp"
30 #include "oops/array.hpp"
31 #include "oops/fieldStreams.inline.hpp"
32 #include "oops/instanceMirrorKlass.hpp"
33 #include "oops/instanceKlass.inline.hpp"
34 #include "oops/klass.inline.hpp"
35 #include "runtime/fieldDescriptor.inline.hpp"
36
37
// Constructor for blocks that do not carry a declared field: EMPTY (free
// space), RESERVED (object header / mirror header area), PADDING (@Contended
// gaps) and header-less INHERITED blocks. The block is created unlinked,
// not yet placed in a layout (offset == -1) and with the weakest (byte)
// alignment.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),       // -1 means "not placed in a layout yet"
  _alignment(1),
  _size(size),
  _field_index(-1),  // no field is associated with this block
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
51
52
// Constructor for blocks representing a field: REGULAR (declared in this
// class), FLATTENED (inlined value) or INHERITED (declared in a super
// class). 'index' is the field's index in the FieldInfo array, used later
// to publish the field's offset once the block has been placed.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),       // -1 means "not placed in a layout yet"
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
67
68 bool LayoutRawBlock::fit(int size, int alignment) {
69 int adjustment = 0;
70 if ((_offset % alignment) != 0) {
71 adjustment = alignment - (_offset % alignment);
72 }
73 return _size >= size + adjustment;
74 }
75
// A FieldGroup gathers fields that must be laid out together: the root
// group, the static field group, or one group per @Contended group.
// Field lists are allocated lazily, on first insertion.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
82
83 void FieldGroup::add_primitive_field(int idx, BasicType type) {
84 int size = type2aelembytes(type);
85 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
86 if (_primitive_fields == nullptr) {
87 _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
88 }
89 _primitive_fields->append(block);
90 }
91
92 void FieldGroup::add_oop_field(int idx) {
93 int size = type2aelembytes(T_OBJECT);
94 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
95 if (_oop_fields == nullptr) {
96 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
97 }
98 _oop_fields->append(block);
99 _oop_count++;
100 }
101
102 void FieldGroup::sort_by_size() {
103 if (_primitive_fields != nullptr) {
104 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
105 }
106 }
107
// A FieldLayout is a doubly-linked list of LayoutRawBlocks covering the
// whole layout without gaps or overlaps. The list stays empty until one of
// the initialize_*_layout() methods is called.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
  _field_info(field_info),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // first block considered when searching for empty slots
  _last(_blocks) {}  // trailing block of the layout
114
// Set up the layout used for static fields (stored in the java.lang.Class
// mirror): a single (virtually) unbounded EMPTY block, preceded by a
// RESERVED block protecting the mirror's own fields when their extent is
// already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
129
// Set up the layout used for instance fields. Without a super class, the
// layout is a RESERVED block covering the object header followed by an
// unbounded EMPTY block. With a super class, the super's layout is
// reconstructed first, and new fields are allocated either from the first
// empty slot or appended at the end, depending on UseEmptySlotsInSupers
// and the presence of @Contended annotations in the super class.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
  if (super_klass == nullptr) {
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool has_fields = reconstruct_layout(super_klass);
    fill_holes(super_klass);
    if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
147
148 LayoutRawBlock* FieldLayout::first_field_block() {
149 LayoutRawBlock* block = _start;
150 while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
151 && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
152 block = block->next_block();
153 }
154 return block;
155 }
156
157
158 // Insert a set of fields into a layout using a best-fit strategy.
159 // For each field, search for the smallest empty slot able to fit the field
160 // (satisfying both size and alignment requirements), if none is found,
161 // add the field at the end of the layout.
162 // Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Cache of the previous field's requirements and search result: lists are
  // sorted by size, so consecutive fields frequently share size/alignment.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;

    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;
      // Scan backwards from the end to 'start', keeping the smallest
      // fitting EMPTY block (best-fit).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting hole: fall back to appending in the trailing block.
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }

    insert_field_block(candidate, b);
  }
}
211
212 // Used for classes with hard coded field offsets, insert a field at the specified offset */
213 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
214 assert(block != nullptr, "Sanity check");
215 block->set_offset(offset);
216 if (start == nullptr) {
217 start = this->_start;
218 }
219 LayoutRawBlock* slot = start;
220 while (slot != nullptr) {
221 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
222 slot == _last){
223 assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
224 assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
225 if (slot->offset() < block->offset()) {
226 int adjustment = block->offset() - slot->offset();
227 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
286 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
287 insert(slot, adj);
288 }
289 insert(slot, block);
290 if (slot->size() == 0) {
291 remove(slot);
292 }
293 _field_info->adr_at(block->field_index())->set_offset(block->offset());
294 return block;
295 }
296
// Rebuilds the layout of the super classes from the field information
// stored in each InstanceKlass of the hierarchy. The resulting list is a
// RESERVED block covering the object header followed by all inherited
// instance fields sorted by offset. Returns true if at least one instance
// field was found.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  // Walk up the class hierarchy, collecting one INHERITED block per
  // non-static field.
  while (ik != nullptr) {
    for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // distinction between static and non-static fields is missing
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      int size = type2aelembytes(type);
      // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
      LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
  }

  // Sort collected fields by offset, then chain them after the header block.
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;

  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
329
330 // Called during the reconstruction of a layout, after fields from super
331 // classes have been inserted. It fills unused slots between inserted fields
332 // with EMPTY blocks, so the regular field insertion methods would work.
333 // This method handles classes with @Contended annotations differently
334 // by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
335 // fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  // Holes in a @Contended super class become PADDING (never reused);
  // otherwise they become EMPTY and may be filled by subclass fields.
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between two consecutive blocks: insert a filler block.
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");

  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  if (!UseEmptySlotsInSupers) {
    // Add an empty slots to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }

  // Terminate the layout with an unbounded EMPTY block where new fields
  // can always be appended.
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
388
389 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
390 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
391 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
392 block->set_offset(slot->offset());
393 slot->set_offset(slot->offset() + block->size());
394 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
395 assert(slot->size() - block->size() >= 0, "no negative size allowed");
396 slot->set_size(slot->size() - block->size());
397 block->set_prev_block(slot->prev_block());
398 block->set_next_block(slot);
399 slot->set_prev_block(block);
400 if (block->prev_block() != nullptr) {
423 _start = block->prev_block();
424 }
425 }
426
// Prints the layout, one line per block ("@offset name signature size/alignment kind").
// For INHERITED blocks, the field's name and signature are recovered by
// searching the super class hierarchy for a field declared at the same offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk the super class chain until the declaring class is found.
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
500
// Gathers the inputs needed to compute the field layout of the class being
// loaded. Results are passed back through the FieldLayoutInfo structure
// (see epilogue()). Layout and group objects are created by prologue().
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _alignment(-1),    // -1 means "not computed yet"
  _has_nonstatic_fields(false),
  _is_contended(is_contended) {}
517
518
519 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
520 assert(g > 0, "must only be called for named contended groups");
521 FieldGroup* fg = nullptr;
522 for (int i = 0; i < _contended_groups.length(); i++) {
523 fg = _contended_groups.at(i);
524 if (fg->contended_group() == g) return fg;
525 }
526 fg = new FieldGroup(g);
527 _contended_groups.append(fg);
528 return fg;
529 }
530
531 void FieldLayoutBuilder::prologue() {
532 _layout = new FieldLayout(_field_info, _constant_pool);
533 const InstanceKlass* super_klass = _super_klass;
534 _layout->initialize_instance_layout(super_klass);
535 if (super_klass != nullptr) {
536 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
537 }
538 _static_layout = new FieldLayout(_field_info, _constant_pool);
539 _static_layout->initialize_static_layout();
540 _static_fields = new FieldGroup();
541 _root_group = new FieldGroup();
542 }
543
544 // Field sorting for regular classes:
545 // - fields are sorted in static and non-static fields
546 // - non-static fields are also sorted according to their contention group
547 // (support of the @Contended annotation)
548 // - @Contended annotation is ignored for static fields
549 void FieldLayoutBuilder::regular_field_sorting() {
550 int idx = 0;
551 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
552 FieldInfo ctrl = _field_info->at(0);
553 FieldGroup* group = nullptr;
554 FieldInfo fieldinfo = *it;
555 if (fieldinfo.access_flags().is_static()) {
556 group = _static_fields;
557 } else {
558 _has_nonstatic_fields = true;
559 if (fieldinfo.field_flags().is_contended()) {
560 int g = fieldinfo.contended_group();
561 if (g == 0) {
562 group = new FieldGroup(true);
563 _contended_groups.append(group);
564 } else {
565 group = get_or_create_contended_group(g);
566 }
567 } else {
568 group = _root_group;
569 }
570 }
571 assert(group != nullptr, "invariant");
572 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
573 switch(type) {
574 case T_BYTE:
575 case T_CHAR:
576 case T_DOUBLE:
577 case T_FLOAT:
578 case T_INT:
579 case T_LONG:
580 case T_SHORT:
581 case T_BOOLEAN:
582 group->add_primitive_field(idx, type);
583 break;
584 case T_OBJECT:
585 case T_ARRAY:
586 if (group != _static_fields) _nonstatic_oopmap_count++;
587 group->add_oop_field(idx);
588 break;
589 default:
590 fatal("Something wrong?");
591 }
592 }
593 _root_group->sort_by_size();
594 _static_fields->sort_by_size();
595 if (!_contended_groups.is_empty()) {
596 for (int i = 0; i < _contended_groups.length(); i++) {
597 _contended_groups.at(i)->sort_by_size();
598 }
599 }
600 }
601
602 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
603 if (ContendedPaddingWidth > 0) {
604 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
605 _layout->insert(slot, padding);
606 }
607 }
608
609 // Computation of regular classes layout is an evolution of the previous default layout
610 // (FieldAllocationStyle 1):
611 // - primitive fields are allocated first (from the biggest to the smallest)
612 // - then oop fields are allocated, either in existing gaps or at the end of
613 // the layout
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();

  if (_is_contended) {
    // The whole class is annotated @Contended: pad before the first field,
    // and remember to pad after the last one (need_tail_padding).
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }
  // Primitive fields first (sorted biggest to smallest), then oop fields.
  _layout->add(_root_group->primitive_fields());
  _layout->add(_root_group->oop_fields());

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      // Each contended group is preceded by padding; a final tail padding
      // is added after the loop.
      insert_contended_padding(start);
      _layout->add(cg->primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Static oop fields are placed contiguously; primitives then fill in.
  _static_layout->add_contiguously(this->_static_fields->oop_fields());
  _static_layout->add(this->_static_fields->primitive_fields());

  epilogue();
}
649
// Finalizes the layout computation: builds the non-static oop maps from the
// allocated oop fields, computes instance/static sizes, and passes the
// results back through _info. Optionally prints the layout (-XX:+PrintFieldLayout).
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;

  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    // Inherited oop maps come first, copied from the super class.
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
    _super_klass->nonstatic_oop_map_count());
  }

  if (_root_group->oop_fields() != nullptr) {
    for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
      LayoutRawBlock* b = _root_group->oop_fields()->at(i);
      nonstatic_oop_maps->add(b->offset(), 1);
    }
  }

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        // Oops of a contended group are allocated contiguously, so one map
        // entry starting at the first oop covers the whole group.
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
      }
    }
  }

  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;

  if (PrintFieldLayout) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, nullptr);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    tty->print_cr("---");
  }
}
706
// Entry point of the layout computation. This builder only supports the
// regular (identity) class layout strategy.
void FieldLayoutBuilder::build_layout() {
  compute_regular_layout();
}
|
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classFileParser.hpp"
27 #include "classfile/fieldLayoutBuilder.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "jvm.h"
31 #include "memory/resourceArea.hpp"
32 #include "oops/array.hpp"
33 #include "oops/fieldStreams.inline.hpp"
34 #include "oops/instanceMirrorKlass.hpp"
35 #include "oops/instanceKlass.inline.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "oops/inlineKlass.inline.hpp"
38 #include "runtime/fieldDescriptor.inline.hpp"
39
// Constructor for blocks that do not carry a declared field: EMPTY (free
// space), RESERVED (header area), PADDING (@Contended gaps) and header-less
// INHERITED blocks. The block is created unlinked, not yet placed in a
// layout (offset == -1) and with the weakest (byte) alignment.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _kind(kind),
  _offset(-1),       // -1 means "not placed in a layout yet"
  _alignment(1),
  _size(size),
  _field_index(-1),  // no field is associated with this block
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
54
55
// Constructor for blocks representing a field: REGULAR (declared in this
// class), FLAT (inlined value class field, see set_inline_klass()) or
// INHERITED (declared in a super class). 'index' is the field's index in
// the FieldInfo array, used later to publish the field's offset.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _kind(kind),
  _offset(-1),       // -1 means "not placed in a layout yet"
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
71
72 bool LayoutRawBlock::fit(int size, int alignment) {
73 int adjustment = 0;
74 if ((_offset % alignment) != 0) {
75 adjustment = alignment - (_offset % alignment);
76 }
77 return _size >= size + adjustment;
78 }
79
// A FieldGroup gathers fields that must be laid out together: the root
// group, the static field group, or one group per @Contended group.
// Primitive (and flat) fields are split between a small and a big list
// relative to oopSize; all lists are allocated lazily.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
87
88 void FieldGroup::add_primitive_field(int idx, BasicType type) {
89 int size = type2aelembytes(type);
90 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
91 if (size >= oopSize) {
92 add_to_big_primitive_list(block);
93 } else {
94 add_to_small_primitive_list(block);
95 }
96 }
97
98 void FieldGroup::add_oop_field(int idx) {
99 int size = type2aelembytes(T_OBJECT);
100 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
101 if (_oop_fields == nullptr) {
102 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
103 }
104 _oop_fields->append(block);
105 _oop_count++;
106 }
107
// Record a flat (inlined value class) field of type 'vk'. Flat fields are
// sorted together with primitive fields, using the inline klass's exact
// size and alignment for placement.
void FieldGroup::add_flat_field(int idx, InlineKlass* vk) {
  LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, vk->get_exact_size_in_bytes(), vk->get_alignment(), false);
  block->set_inline_klass(vk);
  // Route to the big or small list, like a primitive of the same size.
  if (block->size() >= oopSize) {
    add_to_big_primitive_list(block);
  } else {
    add_to_small_primitive_list(block);
  }
}
117
118 void FieldGroup::sort_by_size() {
119 if (_small_primitive_fields != nullptr) {
120 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
121 }
122 if (_big_primitive_fields != nullptr) {
123 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
124 }
125 }
126
127 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
128 if (_small_primitive_fields == nullptr) {
129 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
130 }
131 _small_primitive_fields->append(block);
132 }
133
134 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
135 if (_big_primitive_fields == nullptr) {
136 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
137 }
138 _big_primitive_fields->append(block);
139 }
140
// A FieldLayout is a doubly-linked list of LayoutRawBlocks covering the
// whole layout without gaps or overlaps. The list stays empty until one of
// the initialize_*_layout() methods is called.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
  _field_info(field_info),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // first block considered when searching for empty slots
  _last(_blocks) {}  // trailing block of the layout
147
// Set up the layout used for static fields (stored in the java.lang.Class
// mirror): a single (virtually) unbounded EMPTY block, preceded by a
// RESERVED block protecting the mirror's own fields when their extent is
// already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
162
// Set up the layout used for instance fields. Without a super class, the
// layout is a RESERVED block covering the object header followed by an
// unbounded EMPTY block. With a super class, the super's layout is
// reconstructed first, and new fields are allocated either from the first
// empty slot or appended at the end, depending on UseEmptySlotsInSupers
// and the presence of @Contended annotations in the super class.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
  if (super_klass == nullptr) {
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool has_fields = reconstruct_layout(super_klass);
    fill_holes(super_klass);
    if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
      _start = _blocks; // Setting _start to _blocks instead of _last would allow subclasses
                        // to allocate fields in empty slots of their super classes
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
181
182 LayoutRawBlock* FieldLayout::first_field_block() {
183 LayoutRawBlock* block = _blocks;
184 while (block != nullptr
185 && block->kind() != LayoutRawBlock::INHERITED
186 && block->kind() != LayoutRawBlock::REGULAR
187 && block->kind() != LayoutRawBlock::FLAT) {
188 block = block->next_block();
189 }
190 return block;
191 }
192
193 // Insert a set of fields into a layout.
194 // For each field, search for an empty slot able to fit the field
195 // (satisfying both size and alignment requirements), if none is found,
196 // add the field at the end of the layout.
197 // Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Cache of the previous field's requirements and search result: lists are
  // sorted by size, so consecutive fields frequently share size/alignment.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      // Scan backwards from the end to 'start', keeping the smallest
      // fitting EMPTY block (best-fit).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting hole: fall back to appending in the trailing block.
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
245
// Used for classes with hard coded field offsets: insert a field at the specified offset.
247 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
248 assert(block != nullptr, "Sanity check");
249 block->set_offset(offset);
250 if (start == nullptr) {
251 start = this->_start;
252 }
253 LayoutRawBlock* slot = start;
254 while (slot != nullptr) {
255 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
256 slot == _last){
257 assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
258 assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
259 if (slot->offset() < block->offset()) {
260 int adjustment = block->offset() - slot->offset();
261 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
320 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
321 insert(slot, adj);
322 }
323 insert(slot, block);
324 if (slot->size() == 0) {
325 remove(slot);
326 }
327 _field_info->adr_at(block->field_index())->set_offset(block->offset());
328 return block;
329 }
330
// Rebuilds the block list for 'ik' and all of its super classes from the
// field offsets recorded in their FieldInfo streams.
// Returns true if at least one non-static (instance) field was found.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  // Collect one INHERITED block per instance field, at its recorded offset,
  // walking from 'ik' up through all super classes.
  while (ik != nullptr) {
    for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // Static fields have no slot in the instance layout, skip them
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      LayoutRawBlock* block;
      if (fs.field_flags().is_null_free_inline_type()) {
        // Flat (null-free inline type) field: size and alignment come from the
        // field's inline klass, not from its basic type.
        InlineKlass* vk = InlineKlass::cast(ik->get_inline_type_field_klass(fs.index()));
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, vk->get_exact_size_in_bytes(),
                                   vk->get_alignment(), false);

      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
  }
  // Rebuild the doubly linked list: a RESERVED block covering the object
  // header first, then the collected fields in ascending offset order.
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
369
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  // Walk the list and splice a filler block into every gap between two
  // consecutive blocks (gap = next block's offset beyond this block's end).
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }
  if (!UseEmptySlotsInSupers) {
    // Add an empty slots to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }
  // Terminate the list with an unbounded EMPTY block so that subsequent
  // insertions always find a fitting slot at the tail.
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
425
426 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
427 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
428 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
429 block->set_offset(slot->offset());
430 slot->set_offset(slot->offset() + block->size());
431 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
432 assert(slot->size() - block->size() >= 0, "no negative size allowed");
433 slot->set_size(slot->size() - block->size());
434 block->set_prev_block(slot->prev_block());
435 block->set_next_block(slot);
436 slot->set_prev_block(block);
437 if (block->prev_block() != nullptr) {
460 _start = block->prev_block();
461 }
462 }
463
// Prints one line per block of the layout on 'output' (offset, name,
// signature, size/alignment, kind). For INHERITED blocks, the field name and
// signature are recovered by scanning the super classes' field streams for a
// matching offset, so 'super' must be non-null for instance layouts that
// contain inherited fields.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLAT");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        // RESERVED blocks (object header) have no alignment of their own
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Search up the super class chain for the field declared at this offset
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
537
// Builder constructor: records the inputs of the layout computation and
// initializes all computed state (groups, layouts, counters, flags) to
// "not computed yet" values; the actual work happens in build_layout().
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,
                                       FieldLayoutInfo* info, Array<InlineKlass*>* inline_type_field_klasses) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_type_field_klasses(inline_type_field_klasses),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _alignment(-1),                        // computed by inline_class_field_sorting()
  _first_field_offset(-1),               // computed by compute_inline_class_layout()
  _exact_size_in_bytes(-1),              // computed by compute_inline_class_layout()
  _atomic_field_count(0),
  _fields_size_sum(0),
  _has_nonstatic_fields(false),
  _has_inline_type_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _has_flattening_information(is_inline_type),
  _has_nonatomic_values(false),
  _nullable_atomic_flat_candidate(false)
 {}
566
567 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
568 assert(g > 0, "must only be called for named contended groups");
569 FieldGroup* fg = nullptr;
570 for (int i = 0; i < _contended_groups.length(); i++) {
571 fg = _contended_groups.at(i);
572 if (fg->contended_group() == g) return fg;
573 }
574 fg = new FieldGroup(g);
575 _contended_groups.append(fg);
576 return fg;
577 }
578
579 void FieldLayoutBuilder::prologue() {
580 _layout = new FieldLayout(_field_info, _constant_pool);
581 const InstanceKlass* super_klass = _super_klass;
582 _layout->initialize_instance_layout(super_klass);
583 if (super_klass != nullptr) {
584 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
585 }
586 _static_layout = new FieldLayout(_field_info, _constant_pool);
587 _static_layout->initialize_static_layout();
588 _static_fields = new FieldGroup();
589 _root_group = new FieldGroup();
590 }
591
592 // Field sorting for regular (non-inline) classes:
593 // - fields are sorted in static and non-static fields
594 // - non-static fields are also sorted according to their contention group
595 // (support of the @Contended annotation)
596 // - @Contended annotation is ignored for static fields
597 // - field flattening decisions are taken in this method
598 void FieldLayoutBuilder::regular_field_sorting(TRAPS) {
599 int idx = 0;
600 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
601 FieldInfo ctrl = _field_info->at(0);
602 FieldGroup* group = nullptr;
603 FieldInfo fieldinfo = *it;
604 if (fieldinfo.access_flags().is_static()) {
605 group = _static_fields;
606 } else {
607 _has_nonstatic_fields = true;
608 _atomic_field_count++; // we might decrement this
609 if (fieldinfo.field_flags().is_contended()) {
610 int g = fieldinfo.contended_group();
611 if (g == 0) {
612 group = new FieldGroup(true);
613 _contended_groups.append(group);
614 } else {
615 group = get_or_create_contended_group(g);
616 }
617 } else {
618 group = _root_group;
619 }
620 }
621 assert(group != nullptr, "invariant");
622 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
623 switch(type) {
624 case T_BYTE:
625 case T_CHAR:
626 case T_DOUBLE:
627 case T_FLOAT:
628 case T_INT:
629 case T_LONG:
630 case T_SHORT:
631 case T_BOOLEAN:
632 group->add_primitive_field(idx, type);
633 break;
634 case T_OBJECT:
635 case T_ARRAY:
636 if (!fieldinfo.field_flags().is_null_free_inline_type()) {
637 if (group != _static_fields) _nonstatic_oopmap_count++;
638 group->add_oop_field(idx);
639 } else {
640 assert(type != T_ARRAY, "null free ptr to array not supported");
641 _has_inline_type_fields = true;
642 if (group == _static_fields) {
643 // static fields are never flat
644 group->add_oop_field(idx);
645 } else {
646 // Check below is performed for non-static fields, it should be performed for static fields too but at this stage,
647 // it is not guaranteed that the klass of the static field has been loaded, so the test for static fields is delayed
648 // until the linking phase
649 Klass* klass = _inline_type_field_klasses->at(idx);
650 assert(klass != nullptr, "Sanity check");
651 InlineKlass* vk = InlineKlass::cast(klass);
652 assert(vk->is_implicitly_constructible(), "must be, should have been checked in post_process_parsed_stream()");
653 _has_flattening_information = true;
654 // Flattening decision to be taken here
655 // This code assumes all verification already have been performed
656 // (field's type has been loaded and it is an inline klass)
657 bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
658 (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
659 bool too_atomic_to_flatten = vk->must_be_atomic() || AlwaysAtomicAccesses;
660 bool too_volatile_to_flatten = fieldinfo.access_flags().is_volatile();
661 if (vk->is_naturally_atomic()) {
662 too_atomic_to_flatten = false;
663 //too_volatile_to_flatten = false; //FIXME
664 // Currently, volatile fields are never flat, this could change in the future
665 }
666 if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
667 group->add_flat_field(idx, vk);
668 _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
669 _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
670 if (!vk->is_atomic()) { // flat and non-atomic: take note
671 _has_nonatomic_values = true;
672 _atomic_field_count--; // every other field is atomic but this one
673 }
674 } else {
675 _nonstatic_oopmap_count++;
676 group->add_oop_field(idx);
677 }
678 }
679 }
680 break;
681 default:
682 fatal("Something wrong?");
683 }
684 }
685 _root_group->sort_by_size();
686 _static_fields->sort_by_size();
687 if (!_contended_groups.is_empty()) {
688 for (int i = 0; i < _contended_groups.length(); i++) {
689 _contended_groups.at(i)->sort_by_size();
690 }
691 }
692 }
693
/* Field sorting for inline classes:
 * - because inline classes are immutable, the @Contended annotation is ignored
 *   when computing their layout (with only read operation, there's no false
 *   sharing issue)
 * - this method also records the alignment of the field with the most
 *   constraining alignment, this value is then used as the alignment
 *   constraint when flattening this inline type into another container
 * - field flattening decisions are taken in this method (those decisions are
 *   currently only based on the size of the fields to be flattened, the size
 *   of the resulting instance is not considered)
 */
void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
  assert(_is_inline_type, "Should only be used for inline classes");
  // Running maximum of the per-field alignment constraints; stored in
  // _alignment at the end.
  int alignment = 1;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _atomic_field_count++; // we might decrement this
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(fieldinfo.index(), type);
        break;
      case T_OBJECT:
      case T_ARRAY:
        if (!fieldinfo.field_flags().is_null_free_inline_type()) {
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(fieldinfo.index());
        } else {
          assert(type != T_ARRAY, "null free ptr to array not supported");
          _has_inline_type_fields = true;
          if (group == _static_fields) {
            // static fields are never flat
            group->add_oop_field(fieldinfo.index());
          } else {
            // Check below is performed for non-static fields, it should be performed for static fields too but at this stage,
            // it is not guaranteed that the klass of the static field has been loaded, so the test for static fields is delayed
            // until the linking phase
            Klass* klass = _inline_type_field_klasses->at(fieldinfo.index());
            assert(klass != nullptr, "Sanity check");
            InlineKlass* vk = InlineKlass::cast(klass);
            assert(vk->is_implicitly_constructible(), "must be, should have been checked in post_process_parsed_stream()");
            // Flattening decision to be taken here
            // This code assumes all verifications have already been performed
            // (field's type has been loaded and it is an inline klass)
            bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
                                       (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
            bool too_atomic_to_flatten = vk->must_be_atomic() || AlwaysAtomicAccesses;
            bool too_volatile_to_flatten = fieldinfo.access_flags().is_volatile();
            if (vk->is_naturally_atomic()) {
              too_atomic_to_flatten = false;
              //too_volatile_to_flatten = false; //FIXME
              // Currently, volatile fields are never flat, this could change in the future
            }
            if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
              group->add_flat_field(fieldinfo.index(), vk);
              _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
              field_alignment = vk->get_alignment();
              _field_info->adr_at(fieldinfo.index())->field_flags_addr()->update_flat(true);
              if (!vk->is_atomic()) {  // flat and non-atomic: take note
                _has_nonatomic_values = true;
                _atomic_field_count--; // every other field is atomic but this one
              }
            } else {
              // Not flattened: the field stays an oop
              _nonstatic_oopmap_count++;
              field_alignment = type2aelembytes(T_OBJECT);
              group->add_oop_field(fieldinfo.index());
            }
          }
        }
        break;
      default:
        fatal("Unexpected BasicType");
    }
    // Only instance fields constrain the alignment of the flattened payload
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _alignment = alignment;
  if (!_has_nonstatic_fields) {
    // There are a number of fixes required throughout the type system and JIT
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_ClassFormatError(),
                       "Value Types do not support zero instance size yet");
    return;
  }
}
800
801 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
802 if (ContendedPaddingWidth > 0) {
803 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
804 _layout->insert(slot, padding);
805 }
806 }
807
808 /* Computation of regular classes layout is an evolution of the previous default layout
809 * (FieldAllocationStyle 1):
810 * - primitive fields (both primitive types and flat inline types) are allocated
811 * first, from the biggest to the smallest
812 * - then oop fields are allocated (to increase chances to have contiguous oops and
813 * a simpler oopmap).
814 */
815 void FieldLayoutBuilder::compute_regular_layout(TRAPS) {
816 bool need_tail_padding = false;
817 prologue();
818 regular_field_sorting(CHECK);
819 if (_is_contended) {
820 _layout->set_start(_layout->last_block());
821 // insertion is currently easy because the current strategy doesn't try to fill holes
822 // in super classes layouts => the _start block is by consequence the _last_block
823 insert_contended_padding(_layout->start());
824 need_tail_padding = true;
825 }
826 _layout->add(_root_group->big_primitive_fields());
827 _layout->add(_root_group->small_primitive_fields());
828 _layout->add(_root_group->oop_fields());
829
830 if (!_contended_groups.is_empty()) {
831 for (int i = 0; i < _contended_groups.length(); i++) {
832 FieldGroup* cg = _contended_groups.at(i);
833 LayoutRawBlock* start = _layout->last_block();
834 insert_contended_padding(start);
835 _layout->add(cg->big_primitive_fields());
836 _layout->add(cg->small_primitive_fields(), start);
837 _layout->add(cg->oop_fields(), start);
838 need_tail_padding = true;
839 }
840 }
841
842 if (need_tail_padding) {
843 insert_contended_padding(_layout->last_block());
844 }
845 // Warning: IntanceMirrorKlass expects static oops to be allocated first
846 _static_layout->add_contiguously(_static_fields->oop_fields());
847 _static_layout->add(_static_fields->big_primitive_fields());
848 _static_layout->add(_static_fields->small_primitive_fields());
849
850 epilogue();
851 }
852
853 /* Computation of inline classes has a slightly different strategy than for
854 * regular classes. Regular classes have their oop fields allocated at the end
855 * of the layout to increase GC performances. Unfortunately, this strategy
856 * increases the number of empty slots inside an instance. Because the purpose
857 * of inline classes is to be embedded into other containers, it is critical
858 * to keep their size as small as possible. For this reason, the allocation
859 * strategy is:
860 * - big primitive fields (primitive types and flat inline type smaller
861 * than an oop) are allocated first (from the biggest to the smallest)
862 * - then oop fields
863 * - then small primitive fields (from the biggest to the smallest)
864 */
865 void FieldLayoutBuilder::compute_inline_class_layout(TRAPS) {
866 prologue();
867 inline_class_field_sorting(CHECK);
868 // Inline types are not polymorphic, so they cannot inherit fields.
869 // By consequence, at this stage, the layout must be composed of a RESERVED
870 // block, followed by an EMPTY block.
871 assert(_layout->start()->kind() == LayoutRawBlock::RESERVED, "Unexpected");
872 assert(_layout->start()->next_block()->kind() == LayoutRawBlock::EMPTY, "Unexpected");
873 LayoutRawBlock* first_empty = _layout->start()->next_block();
874 if (first_empty->offset() % _alignment != 0) {
875 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _alignment - (first_empty->offset() % _alignment));
876 _layout->insert(first_empty, padding);
877 _layout->set_start(padding->next_block());
878 }
879
880 _layout->add(_root_group->big_primitive_fields());
881 _layout->add(_root_group->oop_fields());
882 _layout->add(_root_group->small_primitive_fields());
883
884 LayoutRawBlock* first_field = _layout->first_field_block();
885 if (first_field != nullptr) {
886 _first_field_offset = _layout->first_field_block()->offset();
887 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
888 } else {
889 // special case for empty value types
890 _first_field_offset = _layout->blocks()->size();
891 _exact_size_in_bytes = 0;
892 }
893 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
894
895 // Warning:: InstanceMirrorKlass expects static oops to be allocated first
896 _static_layout->add_contiguously(_static_fields->oop_fields());
897 _static_layout->add(_static_fields->big_primitive_fields());
898 _static_layout->add(_static_fields->small_primitive_fields());
899
900 epilogue();
901 }
902
903 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
904 InlineKlass* vklass, int offset) {
905 int diff = offset - vklass->first_field_offset();
906 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
907 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
908 while (map < last_map) {
909 nonstatic_oop_maps->add(map->offset() + diff, map->count());
910 map++;
911 }
912 }
913
914 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
915 if (list != nullptr) {
916 for (int i = 0; i < list->length(); i++) {
917 LayoutRawBlock* f = list->at(i);
918 if (f->kind() == LayoutRawBlock::FLAT) {
919 InlineKlass* vk = f->inline_klass();
920 assert(vk != nullptr, "Should have been initialized");
921 if (vk->contains_oops()) {
922 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
923 }
924 }
925 }
926 }
927 }
928
929 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
930 if (group->oop_fields() != nullptr) {
931 for (int i = 0; i < group->oop_fields()->length(); i++) {
932 LayoutRawBlock* b = group->oop_fields()->at(i);
933 nonstatic_oop_maps->add(b->offset(), 1);
934 }
935 }
936 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
937 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
938 }
939
// Final stage of the layout computation: builds the non-static oop maps
// (inherited entries first, then this class' direct and embedded oops),
// converts the layouts into the sizes InstanceKlass creation needs, and
// fills the FieldLayoutInfo passed back to the class file parser.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    // Inherited oop map entries must come before this class' own entries
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Merge adjacent entries into as few oop map blocks as possible
  nonstatic_oop_maps->compact();

  // Derive the sizes from the end offsets of both layouts
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inline_fields = _has_inline_type_fields;

  // An inline type is naturally atomic if it has just one field, and
  // that field is simple enough.
  _info->_is_naturally_atomic = (_is_inline_type &&
                                 (_atomic_field_count <= 1) &&
                                 !_has_nonatomic_values &&
                                 _contended_groups.is_empty());
  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.


  // Optional debug output (-XX:+PrintFieldLayout / -XX:+PrintInlineLayout)
  if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, nullptr);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      tty->print_cr("First field offset = %d", _first_field_offset);
      tty->print_cr("Alignment = %d bytes", _alignment);
      tty->print_cr("Exact size = %d bytes", _exact_size_in_bytes);
    }
    tty->print_cr("---");
  }
}
1006
1007 void FieldLayoutBuilder::build_layout(TRAPS) {
1008 if (_is_inline_type) {
1009 compute_inline_class_layout(CHECK);
1010 } else {
1011 compute_regular_layout(CHECK);
1012 }
1013 }
|