1 /*
2 * Copyright (c) 2020, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "jvm.h"
27 #include "classfile/classFileParser.hpp"
28 #include "classfile/fieldLayoutBuilder.hpp"
29 #include "memory/resourceArea.hpp"
30 #include "oops/array.hpp"
31 #include "oops/fieldStreams.inline.hpp"
32 #include "oops/instanceMirrorKlass.hpp"
33 #include "oops/instanceKlass.inline.hpp"
34 #include "oops/klass.inline.hpp"
35 #include "runtime/fieldDescriptor.inline.hpp"
36
37
// Constructor for blocks that do not map to a declared Java field:
// EMPTY (free space), RESERVED (space unavailable for fields), PADDING
// (@Contended isolation) and INHERITED (fields from super classes).
// Such blocks have no field index and a trivial alignment of 1.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(NULL),
  _prev_block(NULL),
  _kind(kind),
  _offset(-1),        // offset is assigned later, when the block is placed in a layout
  _alignment(1),
  _size(size),
  _field_index(-1),   // no associated field
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
51
52
// Constructor for blocks mapping a field (REGULAR, FLATTENED) or an
// inherited field re-created during layout reconstruction (INHERITED).
// index is the field's index in the class' field array; size and alignment
// are expressed in bytes and must both be satisfied by the final offset.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(NULL),
  _prev_block(NULL),
  _kind(kind),
  _offset(-1),        // offset is assigned later, when the block is placed in a layout
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
67
68 bool LayoutRawBlock::fit(int size, int alignment) {
69 int adjustment = 0;
70 if ((_offset % alignment) != 0) {
71 adjustment = alignment - (_offset % alignment);
72 }
73 return _size >= size + adjustment;
74 }
75
// A FieldGroup gathers fields that must be laid out together: the default
// (root) group, the group of static fields, or one @Contended group.
FieldGroup::FieldGroup(int contended_group) :
  _next(NULL),
  _primitive_fields(NULL),   // lazily allocated, see add_primitive_field()
  _oop_fields(NULL),         // lazily allocated, see add_oop_field()
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
82
83 void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
84 int size = type2aelembytes(type);
85 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
86 if (_primitive_fields == NULL) {
87 _primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
88 }
89 _primitive_fields->append(block);
90 }
91
92 void FieldGroup::add_oop_field(AllFieldStream fs) {
93 int size = type2aelembytes(T_OBJECT);
94 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
95 if (_oop_fields == NULL) {
96 _oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
97 }
98 _oop_fields->append(block);
99 _oop_count++;
100 }
101
102 void FieldGroup::sort_by_size() {
103 if (_primitive_fields != NULL) {
104 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
105 }
106 }
107
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing, in
// offset order, the content of an instance or static field layout.
FieldLayout::FieldLayout(Array<u2>* fields, ConstantPool* cp) :
  _fields(fields),
  _cp(cp),
  _blocks(NULL),
  _start(_blocks),   // first block usable for field allocation (NULL until an initialize_* method runs)
  _last(_blocks) {}  // terminal block of the list (NULL until an initialize_* method runs)
114
// Creates the initial layout used for static fields: a single unbounded EMPTY
// block, preceded by a RESERVED block covering the java.lang.Class instance
// header when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
129
130 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
131 if (super_klass == NULL) {
132 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
133 _blocks->set_offset(0);
134 _last = _blocks;
135 _start = _blocks;
136 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
137 } else {
138 bool has_fields = reconstruct_layout(super_klass);
139 fill_holes(super_klass);
140 if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
141 _start = _blocks; // start allocating fields from the first empty block
142 } else {
143 _start = _last; // append fields at the end of the reconstructed layout
144 }
145 }
146 }
147
148 LayoutRawBlock* FieldLayout::first_field_block() {
149 LayoutRawBlock* block = _start;
150 while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
151 && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
152 block = block->next_block();
153 }
154 return block;
155 }
156
157
// Insert a set of fields into a layout using a best-fit strategy.
// For each field, search for the smallest empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;                  // no fields in this group, nothing to do
  if (start == NULL) start = this->_start;   // default: first block usable for allocation
  // Memoization of the previous failed search: if the next field has the same
  // size/alignment, searching again would fail identically.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = NULL;
    LayoutRawBlock* candidate = NULL;

    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != NULL, "Sanity check");
      last_search_success = true;
      // Walk backward from the end toward "start", keeping the smallest
      // EMPTY block able to hold the field (best-fit).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == NULL || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == NULL) {
        candidate = last_block();   // no fitting slot found: append at the end
        last_search_success = false;
      }
      assert(candidate != NULL, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }

    insert_field_block(candidate, b);
  }
}
211
212 // Used for classes with hard coded field offsets, insert a field at the specified offset */
213 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
214 assert(block != NULL, "Sanity check");
215 block->set_offset(offset);
216 if (start == NULL) {
217 start = this->_start;
218 }
219 LayoutRawBlock* slot = start;
220 while (slot != NULL) {
221 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
222 slot == _last){
223 assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
224 assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
225 if (slot->offset() < block->offset()) {
226 int adjustment = block->offset() - slot->offset();
227 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
286 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
287 insert(slot, adj);
288 }
289 insert(slot, block);
290 if (slot->size() == 0) {
291 remove(slot);
292 }
293 FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
294 return block;
295 }
296
297 bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
298 bool has_instance_fields = false;
299 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
300 while (ik != NULL) {
301 for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
302 BasicType type = Signature::basic_type(fs.signature());
303 // distinction between static and non-static fields is missing
304 if (fs.access_flags().is_static()) continue;
305 has_instance_fields = true;
306 int size = type2aelembytes(type);
307 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
308 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
309 block->set_offset(fs.offset());
310 all_fields->append(block);
311 }
312 ik = ik->super() == NULL ? NULL : InstanceKlass::cast(ik->super());
313 }
314
315 all_fields->sort(LayoutRawBlock::compare_offset);
316 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
317 _blocks->set_offset(0);
318 _last = _blocks;
319
320 for(int i = 0; i < all_fields->length(); i++) {
321 LayoutRawBlock* b = all_fields->at(i);
322 _last->set_next_block(b);
323 b->set_prev_block(_last);
324 _last = b;
325 }
326 _start = _blocks;
327 return has_instance_fields;
328 }
329
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != NULL, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  // Holes in a @Contended super class must stay untouchable: fill them with PADDING.
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != NULL) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between the end of b and the start of its successor:
      // link a filler block in between.
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == NULL, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");

  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  if (!UseEmptySlotsInSupers) {
    // Add an empty slots to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }

  // Terminal unbounded EMPTY block where new fields can always be appended.
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
388
389 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
390 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
391 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
392 block->set_offset(slot->offset());
393 slot->set_offset(slot->offset() + block->size());
394 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
395 assert(slot->size() - block->size() >= 0, "no negative size allowed");
396 slot->set_size(slot->size() - block->size());
397 block->set_prev_block(slot->prev_block());
398 block->set_next_block(slot);
399 slot->set_prev_block(block);
400 if (block->prev_block() != NULL) {
412 if (_blocks == block) {
413 _blocks = block->next_block();
414 if (_blocks != NULL) {
415 _blocks->set_prev_block(NULL);
416 }
417 } else {
418 assert(block->prev_block() != NULL, "_prev should be set for non-head blocks");
419 block->prev_block()->set_next_block(block->next_block());
420 block->next_block()->set_prev_block(block->prev_block());
421 }
422 if (block == _start) {
423 _start = block->prev_block();
424 }
425 }
426
// Prints the layout, one line per block, for debugging (-XX:+PrintFieldLayout).
// For INHERITED blocks, the field name/signature is recovered by searching the
// super class chain for a field declared at the same offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != NULL, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk up the super chain until the declaring class of this offset is found.
        while (!found && ik != NULL) {
          for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
500
// Gathers all inputs needed to compute a class layout and the output
// structure (FieldLayoutInfo) handed back to the class file parser.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       Array<u2>* fields, bool is_contended, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _fields(fields),
  _info(info),
  _root_group(NULL),        // created in prologue()
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(NULL),     // created in prologue()
  _layout(NULL),
  _static_layout(NULL),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _has_nonstatic_fields(false),
  _is_contended(is_contended) {}
517
518
519 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
520 assert(g > 0, "must only be called for named contended groups");
521 FieldGroup* fg = NULL;
522 for (int i = 0; i < _contended_groups.length(); i++) {
523 fg = _contended_groups.at(i);
524 if (fg->contended_group() == g) return fg;
525 }
526 fg = new FieldGroup(g);
527 _contended_groups.append(fg);
528 return fg;
529 }
530
531 void FieldLayoutBuilder::prologue() {
532 _layout = new FieldLayout(_fields, _constant_pool);
533 const InstanceKlass* super_klass = _super_klass;
534 _layout->initialize_instance_layout(super_klass);
535 if (super_klass != NULL) {
536 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
537 }
538 _static_layout = new FieldLayout(_fields, _constant_pool);
539 _static_layout->initialize_static_layout();
540 _static_fields = new FieldGroup();
541 _root_group = new FieldGroup();
542 }
543
// Field sorting for regular classes:
//   - fields are sorted in static and non-static fields
//   - non-static fields are also sorted according to their contention group
//     (support of the @Contended annotation)
//   - @Contended annotation is ignored for static fields
void FieldLayoutBuilder::regular_field_sorting() {
  for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
    FieldGroup* group = NULL;
    if (fs.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fs.is_contended()) {
        int g = fs.contended_group();
        if (g == 0) {
          // Unnamed @Contended field: gets its own anonymous group.
          // NOTE(review): FieldGroup takes an int; passing 'true' makes the
          // group id 1, which get_or_create_contended_group() could match for
          // a named group with id 1 — confirm this collision is intended.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != NULL, "invariant");
    BasicType type = Signature::basic_type(fs.signature());
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(fs, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
        // Static oops are tracked by the mirror's oop map, not this class'.
        if (group != _static_fields) _nonstatic_oopmap_count++;
        group->add_oop_field(fs);
        break;
      default:
        fatal("Something wrong?");
    }
  }
  // Sort every group biggest-field-first for the allocation phase.
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
598
599 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
600 if (ContendedPaddingWidth > 0) {
601 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
602 _layout->insert(slot, padding);
603 }
604 }
605
// Computation of regular classes layout is an evolution of the previous default layout
// (FieldAllocationStyle 1):
//   - primitive fields are allocated first (from the biggest to the smallest)
//   - then oop fields are allocated, either in existing gaps or at the end of
//     the layout
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();

  if (_is_contended) {
    // The whole class is @Contended: pad before its own fields too.
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }
  _layout->add(_root_group->primitive_fields());
  _layout->add(_root_group->oop_fields());

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      // Pad before each contended group, then confine its fields after the padding.
      insert_contended_padding(start);
      _layout->add(cg->primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    // Pad after the last contended field so later allocations don't share its cache line.
    insert_contended_padding(_layout->last_block());
  }

  // Static oops are laid out contiguously to keep the mirror's oop handling simple.
  _static_layout->add_contiguously(this->_static_fields->oop_fields());
  _static_layout->add(this->_static_fields->primitive_fields());

  epilogue();
}
646
// Finalizes the layout computation: builds the non-static oop maps (inherited
// entries first, then this class' oop fields), computes instance and static
// sizes, and passes everything back through the FieldLayoutInfo structure.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;

  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }

  if (_root_group->oop_fields() != NULL) {
    // One map entry per oop of the default group.
    for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
      LayoutRawBlock* b = _root_group->oop_fields()->at(i);
      nonstatic_oop_maps->add(b->offset(), 1);
    }
  }

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != NULL && cg->oop_fields()->at(0) != NULL, "oop_count > 0 but no oop fields found");
        // Oops of a contended group are contiguous: a single entry covers them all.
        nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
      }
    }
  }

  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;

  if (PrintFieldLayout) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, NULL);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    tty->print_cr("---");
  }
}
703
// Entry point of the layout computation; only regular (non-mirror,
// non-special) class layouts are computed here.
void FieldLayoutBuilder::build_layout() {
  compute_regular_layout();
}
|
1 /*
2 * Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "jvm.h"
27 #include "classfile/classFileParser.hpp"
28 #include "classfile/fieldLayoutBuilder.hpp"
29 #include "classfile/systemDictionary.hpp"
30 #include "classfile/vmSymbols.hpp"
31 #include "memory/resourceArea.hpp"
32 #include "oops/array.hpp"
33 #include "oops/fieldStreams.inline.hpp"
34 #include "oops/instanceMirrorKlass.hpp"
35 #include "oops/instanceKlass.inline.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "oops/inlineKlass.inline.hpp"
38 #include "runtime/fieldDescriptor.inline.hpp"
39
// Constructor for blocks that do not map to a declared Java field:
// EMPTY (free space), RESERVED (space unavailable for fields), PADDING
// (@Contended isolation) and INHERITED (fields from super classes).
// Such blocks have no field index, no inline klass and an alignment of 1.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(NULL),
  _prev_block(NULL),
  _inline_klass(NULL),   // only INLINED blocks carry an inline klass
  _kind(kind),
  _offset(-1),           // offset is assigned later, when the block is placed in a layout
  _alignment(1),
  _size(size),
  _field_index(-1),      // no associated field
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
54
55
// Constructor for blocks mapping a declared field (REGULAR, INLINED) or an
// inherited field re-created during layout reconstruction (INHERITED).
// index is the field's index in the class' field array; size and alignment
// are in bytes. For INLINED blocks, set_inline_klass() is called afterwards.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(NULL),
  _prev_block(NULL),
  _inline_klass(NULL),   // set later for INLINED blocks, see FieldGroup::add_inlined_field()
  _kind(kind),
  _offset(-1),           // offset is assigned later, when the block is placed in a layout
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == INLINED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
71
72 bool LayoutRawBlock::fit(int size, int alignment) {
73 int adjustment = 0;
74 if ((_offset % alignment) != 0) {
75 adjustment = alignment - (_offset % alignment);
76 }
77 return _size >= size + adjustment;
78 }
79
// A FieldGroup gathers fields that must be laid out together. Primitive and
// inlined fields are split into small (< oopSize) and big lists so the layout
// algorithm can interleave them efficiently.
FieldGroup::FieldGroup(int contended_group) :
  _next(NULL),
  _small_primitive_fields(NULL),   // lazily allocated, see add_to_small_primitive_list()
  _big_primitive_fields(NULL),     // lazily allocated, see add_to_big_primitive_list()
  _oop_fields(NULL),               // lazily allocated, see add_oop_field()
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
87
88 void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
89 int size = type2aelembytes(type);
90 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
91 if (size >= oopSize) {
92 add_to_big_primitive_list(block);
93 } else {
94 add_to_small_primitive_list(block);
95 }
96 }
97
98 void FieldGroup::add_oop_field(AllFieldStream fs) {
99 int size = type2aelembytes(T_OBJECT);
100 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
101 if (_oop_fields == NULL) {
102 _oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
103 }
104 _oop_fields->append(block);
105 _oop_count++;
106 }
107
108 void FieldGroup::add_inlined_field(AllFieldStream fs, InlineKlass* vk) {
109 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INLINED, vk->get_exact_size_in_bytes(), vk->get_alignment(), false);
110 block->set_inline_klass(vk);
111 if (block->size() >= oopSize) {
112 add_to_big_primitive_list(block);
113 } else {
114 add_to_small_primitive_list(block);
115 }
116 }
117
118 void FieldGroup::sort_by_size() {
119 if (_small_primitive_fields != NULL) {
120 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
121 }
122 if (_big_primitive_fields != NULL) {
123 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
124 }
125 }
126
127 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
128 if (_small_primitive_fields == NULL) {
129 _small_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
130 }
131 _small_primitive_fields->append(block);
132 }
133
134 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
135 if (_big_primitive_fields == NULL) {
136 _big_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
137 }
138 _big_primitive_fields->append(block);
139 }
140
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing, in
// offset order, the content of an instance or static field layout.
FieldLayout::FieldLayout(Array<u2>* fields, ConstantPool* cp) :
  _fields(fields),
  _cp(cp),
  _blocks(NULL),
  _start(_blocks),   // first block usable for field allocation (NULL until an initialize_* method runs)
  _last(_blocks) {}  // terminal block of the list (NULL until an initialize_* method runs)
147
// Creates the initial layout used for static fields: a single unbounded EMPTY
// block, preceded by a RESERVED block covering the java.lang.Class instance
// header when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
162
163 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
164 if (super_klass == NULL) {
165 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
166 _blocks->set_offset(0);
167 _last = _blocks;
168 _start = _blocks;
169 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
170 } else {
171 bool has_fields = reconstruct_layout(super_klass);
172 fill_holes(super_klass);
173 if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
174 _start = _blocks; // Setting _start to _blocks instead of _last would allow subclasses
175 // to allocate fields in empty slots of their super classes
176 } else {
177 _start = _last; // append fields at the end of the reconstructed layout
178 }
179 }
180 }
181
182 LayoutRawBlock* FieldLayout::first_field_block() {
183 LayoutRawBlock* block = _blocks;
184 while (block != NULL
185 && block->kind() != LayoutRawBlock::INHERITED
186 && block->kind() != LayoutRawBlock::REGULAR
187 && block->kind() != LayoutRawBlock::INLINED) {
188 block = block->next_block();
189 }
190 return block;
191 }
192
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;                  // no fields in this group, nothing to do
  if (start == NULL) start = this->_start;   // default: first block usable for allocation
  // Memoization of the previous failed search: if the next field has the same
  // size/alignment, searching again would fail identically.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = NULL;
    LayoutRawBlock* candidate = NULL;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != NULL, "Sanity check");
      last_search_success = true;

      // Walk backward from the end toward "start", keeping the smallest
      // EMPTY block able to hold the field (best-fit).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == NULL || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == NULL) {
        candidate = last_block();   // no fitting slot found: append at the end
        last_search_success = false;
      }
      assert(candidate != NULL, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
245
246 // Used for classes with hard coded field offsets, insert a field at the specified offset */
247 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
248 assert(block != NULL, "Sanity check");
249 block->set_offset(offset);
250 if (start == NULL) {
251 start = this->_start;
252 }
253 LayoutRawBlock* slot = start;
254 while (slot != NULL) {
255 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
256 slot == _last){
257 assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
258 assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
259 if (slot->offset() < block->offset()) {
260 int adjustment = block->offset() - slot->offset();
261 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
320 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
321 insert(slot, adj);
322 }
323 insert(slot, block);
324 if (slot->size() == 0) {
325 remove(slot);
326 }
327 FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
328 return block;
329 }
330
// Rebuilds the block list describing the layout of 'ik' and all of its super
// classes, walking the class hierarchy upward and collecting every non-static
// field as an INHERITED block at its recorded offset.
// Returns true if at least one instance field was found.
// Note: gaps between inherited fields are not filled here; see fill_holes().
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  while (ik != NULL) {
    for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // static fields do not belong to instance layouts: skip them
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      LayoutRawBlock* block;
      if (type == T_PRIMITIVE_OBJECT) {
        // Flattened inline type field: use the exact size/alignment of its klass
        InlineKlass* vk = InlineKlass::cast(ik->get_inline_type_field_klass(fs.index()));
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, vk->get_exact_size_in_bytes(),
                                   vk->get_alignment(), false);

      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == NULL ? NULL : InstanceKlass::cast(ik->super());
  }
  // Chain the blocks in offset order, after a RESERVED block for the header
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
369
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != NULL, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  // Walk the list and splice a filler block into every gap between
  // consecutive blocks (gap = next offset beyond current block's end).
  while (b->next_block() != NULL) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == NULL, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }
  if (!UseEmptySlotsInSupers) {
    // Add an empty slot to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }
  // Terminate with an unbounded EMPTY block where new fields will be appended
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
425
426 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
427 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
428 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
429 block->set_offset(slot->offset());
430 slot->set_offset(slot->offset() + block->size());
431 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
432 assert(slot->size() - block->size() >= 0, "no negative size allowed");
433 slot->set_size(slot->size() - block->size());
434 block->set_prev_block(slot->prev_block());
435 block->set_next_block(slot);
436 slot->set_prev_block(block);
437 if (block->prev_block() != NULL) {
449 if (_blocks == block) {
450 _blocks = block->next_block();
451 if (_blocks != NULL) {
452 _blocks->set_prev_block(NULL);
453 }
454 } else {
455 assert(block->prev_block() != NULL, "_prev should be set for non-head blocks");
456 block->prev_block()->set_next_block(block->next_block());
457 block->next_block()->set_prev_block(block->prev_block());
458 }
459 if (block == _start) {
460 _start = block->prev_block();
461 }
462 }
463
// Prints a human-readable dump of the layout (one line per block) up to, but
// not including, the trailing _last block. For INHERITED blocks the field
// name/signature are looked up in the super class hierarchy by offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::INLINED: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "INLINED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != NULL, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Search each super class for the field declared at this offset
        while (!found && ik != NULL) {
          for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
537
// Builder constructor: stores the class data needed to compute both the
// instance and static layouts. Layouts and field groups are created lazily
// in prologue(); counters and cached results start in their "unset" state
// (-1 for sizes/offsets, 0/false for counters and flags).
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       Array<u2>* fields, bool is_contended, bool is_inline_type,
                                       FieldLayoutInfo* info, Array<InlineKlass*>* inline_type_field_klasses) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _fields(fields),
  _info(info),
  _inline_type_field_klasses(inline_type_field_klasses),
  _root_group(NULL),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(NULL),
  _layout(NULL),
  _static_layout(NULL),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _first_field_offset(-1),
  _exact_size_in_bytes(-1),
  _has_nonstatic_fields(false),
  _has_inline_type_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _has_flattening_information(is_inline_type),
  _has_nonatomic_values(false),
  _atomic_field_count(0)
 {}
564
565 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
566 assert(g > 0, "must only be called for named contended groups");
567 FieldGroup* fg = NULL;
568 for (int i = 0; i < _contended_groups.length(); i++) {
569 fg = _contended_groups.at(i);
570 if (fg->contended_group() == g) return fg;
571 }
572 fg = new FieldGroup(g);
573 _contended_groups.append(fg);
574 return fg;
575 }
576
577 void FieldLayoutBuilder::prologue() {
578 _layout = new FieldLayout(_fields, _constant_pool);
579 const InstanceKlass* super_klass = _super_klass;
580 _layout->initialize_instance_layout(super_klass);
581 if (super_klass != NULL) {
582 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
583 }
584 _static_layout = new FieldLayout(_fields, _constant_pool);
585 _static_layout->initialize_static_layout();
586 _static_fields = new FieldGroup();
587 _root_group = new FieldGroup();
588 }
589
590 // Field sorting for regular (non-inline) classes:
591 // - fields are sorted in static and non-static fields
592 // - non-static fields are also sorted according to their contention group
593 // (support of the @Contended annotation)
594 // - @Contended annotation is ignored for static fields
595 // - field flattening decisions are taken in this method
596 void FieldLayoutBuilder::regular_field_sorting() {
597 for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
598 FieldGroup* group = NULL;
599 if (fs.access_flags().is_static()) {
600 group = _static_fields;
601 } else {
602 _has_nonstatic_fields = true;
603 _atomic_field_count++; // we might decrement this
604 if (fs.is_contended()) {
605 int g = fs.contended_group();
606 if (g == 0) {
607 group = new FieldGroup(true);
608 _contended_groups.append(group);
609 } else {
610 group = get_or_create_contended_group(g);
611 }
612 } else {
613 group = _root_group;
614 }
615 }
616 assert(group != NULL, "invariant");
617 BasicType type = Signature::basic_type(fs.signature());
618 switch(type) {
619 case T_BYTE:
620 case T_CHAR:
621 case T_DOUBLE:
622 case T_FLOAT:
623 case T_INT:
624 case T_LONG:
625 case T_SHORT:
626 case T_BOOLEAN:
627 group->add_primitive_field(fs, type);
628 break;
629 case T_OBJECT:
630 case T_ARRAY:
631 if (group != _static_fields) _nonstatic_oopmap_count++;
632 group->add_oop_field(fs);
633 break;
634 case T_PRIMITIVE_OBJECT:
635 _has_inline_type_fields = true;
636 if (group == _static_fields) {
637 // static fields are never inlined
638 group->add_oop_field(fs);
639 } else {
640 _has_flattening_information = true;
641 // Flattening decision to be taken here
642 // This code assumes all verification already have been performed
643 // (field's type has been loaded and it is an inline klass)
644 JavaThread* THREAD = JavaThread::current();
645 Klass* klass = _inline_type_field_klasses->at(fs.index());
646 assert(klass != NULL, "Sanity check");
647 InlineKlass* vk = InlineKlass::cast(klass);
648 bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
649 (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
650 bool too_atomic_to_flatten = vk->is_declared_atomic() || AlwaysAtomicAccesses;
651 bool too_volatile_to_flatten = fs.access_flags().is_volatile();
652 if (vk->is_naturally_atomic()) {
653 too_atomic_to_flatten = false;
654 //too_volatile_to_flatten = false; //FIXME
655 // volatile fields are currently never inlined, this could change in the future
656 }
657 if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten) || fs.access_flags().is_final()) {
658 group->add_inlined_field(fs, vk);
659 _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
660 fs.set_inlined(true);
661 if (!vk->is_atomic()) { // flat and non-atomic: take note
662 _has_nonatomic_values = true;
663 _atomic_field_count--; // every other field is atomic but this one
664 }
665 } else {
666 _nonstatic_oopmap_count++;
667 group->add_oop_field(fs);
668 }
669 }
670 break;
671 default:
672 fatal("Something wrong?");
673 }
674 }
675 _root_group->sort_by_size();
676 _static_fields->sort_by_size();
677 if (!_contended_groups.is_empty()) {
678 for (int i = 0; i < _contended_groups.length(); i++) {
679 _contended_groups.at(i)->sort_by_size();
680 }
681 }
682 }
683
684 /* Field sorting for inline classes:
685 * - because inline classes are immutable, the @Contended annotation is ignored
686 * when computing their layout (with only read operation, there's no false
687 * sharing issue)
688 * - this method also records the alignment of the field with the most
689 * constraining alignment, this value is then used as the alignment
690 * constraint when flattening this inline type into another container
691 * - field flattening decisions are taken in this method (those decisions are
692 * currently only based in the size of the fields to be inlined, the size
693 * of the resulting instance is not considered)
694 */
695 void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
696 assert(_is_inline_type, "Should only be used for inline classes");
697 int alignment = 1;
698 for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
699 FieldGroup* group = NULL;
700 int field_alignment = 1;
701 if (fs.access_flags().is_static()) {
702 group = _static_fields;
703 } else {
704 _has_nonstatic_fields = true;
705 _atomic_field_count++; // we might decrement this
706 group = _root_group;
707 }
708 assert(group != NULL, "invariant");
709 BasicType type = Signature::basic_type(fs.signature());
710 switch(type) {
711 case T_BYTE:
712 case T_CHAR:
713 case T_DOUBLE:
714 case T_FLOAT:
715 case T_INT:
716 case T_LONG:
717 case T_SHORT:
718 case T_BOOLEAN:
719 if (group != _static_fields) {
720 field_alignment = type2aelembytes(type); // alignment == size for primitive types
721 }
722 group->add_primitive_field(fs, type);
723 break;
724 case T_OBJECT:
725 case T_ARRAY:
726 if (group != _static_fields) {
727 _nonstatic_oopmap_count++;
728 field_alignment = type2aelembytes(type); // alignment == size for oops
729 }
730 group->add_oop_field(fs);
731 break;
732 case T_PRIMITIVE_OBJECT: {
733 // fs.set_inline(true);
734 _has_inline_type_fields = true;
735 if (group == _static_fields) {
736 // static fields are never inlined
737 group->add_oop_field(fs);
738 } else {
739 // Flattening decision to be taken here
740 // This code assumes all verifications have already been performed
741 // (field's type has been loaded and it is an inline klass)
742 JavaThread* THREAD = JavaThread::current();
743 Klass* klass = _inline_type_field_klasses->at(fs.index());
744 assert(klass != NULL, "Sanity check");
745 InlineKlass* vk = InlineKlass::cast(klass);
746 bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
747 (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
748 bool too_atomic_to_flatten = vk->is_declared_atomic() || AlwaysAtomicAccesses;
749 bool too_volatile_to_flatten = fs.access_flags().is_volatile();
750 if (vk->is_naturally_atomic()) {
751 too_atomic_to_flatten = false;
752 //too_volatile_to_flatten = false; //FIXME
753 // volatile fields are currently never inlined, this could change in the future
754 }
755 if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten) || fs.access_flags().is_final()) {
756 group->add_inlined_field(fs, vk);
757 _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
758 field_alignment = vk->get_alignment();
759 fs.set_inlined(true);
760 if (!vk->is_atomic()) { // flat and non-atomic: take note
761 _has_nonatomic_values = true;
762 _atomic_field_count--; // every other field is atomic but this one
763 }
764 } else {
765 _nonstatic_oopmap_count++;
766 field_alignment = type2aelembytes(T_OBJECT);
767 group->add_oop_field(fs);
768 }
769 }
770 break;
771 }
772 default:
773 fatal("Unexpected BasicType");
774 }
775 if (!fs.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
776 }
777 _alignment = alignment;
778 if (!_has_nonstatic_fields) {
779 // There are a number of fixes required throughout the type system and JIT
780 Exceptions::fthrow(THREAD_AND_LOCATION,
781 vmSymbols::java_lang_ClassFormatError(),
782 "Value Types do not support zero instance size yet");
783 return;
784 }
785 }
786
787 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
788 if (ContendedPaddingWidth > 0) {
789 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
790 _layout->insert(slot, padding);
791 }
792 }
793
794 /* Computation of regular classes layout is an evolution of the previous default layout
795 * (FieldAllocationStyle 1):
796 * - primitive fields (both primitive types and flattened inline types) are allocated
797 * first, from the biggest to the smallest
798 * - then oop fields are allocated (to increase chances to have contiguous oops and
799 * a simpler oopmap).
800 */
801 void FieldLayoutBuilder::compute_regular_layout() {
802 bool need_tail_padding = false;
803 prologue();
804 regular_field_sorting();
805 if (_is_contended) {
806 _layout->set_start(_layout->last_block());
807 // insertion is currently easy because the current strategy doesn't try to fill holes
808 // in super classes layouts => the _start block is by consequence the _last_block
809 insert_contended_padding(_layout->start());
810 need_tail_padding = true;
811 }
812 _layout->add(_root_group->big_primitive_fields());
813 _layout->add(_root_group->small_primitive_fields());
814 _layout->add(_root_group->oop_fields());
815
816 if (!_contended_groups.is_empty()) {
817 for (int i = 0; i < _contended_groups.length(); i++) {
818 FieldGroup* cg = _contended_groups.at(i);
819 LayoutRawBlock* start = _layout->last_block();
820 insert_contended_padding(start);
821 _layout->add(cg->big_primitive_fields());
822 _layout->add(cg->small_primitive_fields(), start);
823 _layout->add(cg->oop_fields(), start);
824 need_tail_padding = true;
825 }
826 }
827
828 if (need_tail_padding) {
829 insert_contended_padding(_layout->last_block());
830 }
831 // Warning: IntanceMirrorKlass expects static oops to be allocated first
832 _static_layout->add_contiguously(_static_fields->oop_fields());
833 _static_layout->add(_static_fields->big_primitive_fields());
834 _static_layout->add(_static_fields->small_primitive_fields());
835
836 epilogue();
837 }
838
839 /* Computation of inline classes has a slightly different strategy than for
840 * regular classes. Regular classes have their oop fields allocated at the end
841 * of the layout to increase GC performances. Unfortunately, this strategy
842 * increases the number of empty slots inside an instance. Because the purpose
843 * of inline classes is to be embedded into other containers, it is critical
844 * to keep their size as small as possible. For this reason, the allocation
845 * strategy is:
846 * - big primitive fields (primitive types and flattened inline type smaller
847 * than an oop) are allocated first (from the biggest to the smallest)
848 * - then oop fields
849 * - then small primitive fields (from the biggest to the smallest)
850 */
851 void FieldLayoutBuilder::compute_inline_class_layout(TRAPS) {
852 prologue();
853 inline_class_field_sorting(CHECK);
854 // Inline types are not polymorphic, so they cannot inherit fields.
855 // By consequence, at this stage, the layout must be composed of a RESERVED
856 // block, followed by an EMPTY block.
857 assert(_layout->start()->kind() == LayoutRawBlock::RESERVED, "Unexpected");
858 assert(_layout->start()->next_block()->kind() == LayoutRawBlock::EMPTY, "Unexpected");
859 LayoutRawBlock* first_empty = _layout->start()->next_block();
860 if (first_empty->offset() % _alignment != 0) {
861 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _alignment - (first_empty->offset() % _alignment));
862 _layout->insert(first_empty, padding);
863 _layout->set_start(padding->next_block());
864 }
865
866 _layout->add(_root_group->big_primitive_fields());
867 _layout->add(_root_group->oop_fields());
868 _layout->add(_root_group->small_primitive_fields());
869
870 LayoutRawBlock* first_field = _layout->first_field_block();
871 if (first_field != NULL) {
872 _first_field_offset = _layout->first_field_block()->offset();
873 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
874 } else {
875 // special case for empty value types
876 _first_field_offset = _layout->blocks()->size();
877 _exact_size_in_bytes = 0;
878 }
879 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
880
881 // Warning:: InstanceMirrorKlass expects static oops to be allocated first
882 _static_layout->add_contiguously(_static_fields->oop_fields());
883 _static_layout->add(_static_fields->big_primitive_fields());
884 _static_layout->add(_static_fields->small_primitive_fields());
885
886 epilogue();
887 }
888
889 void FieldLayoutBuilder::add_inlined_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
890 InlineKlass* vklass, int offset) {
891 int diff = offset - vklass->first_field_offset();
892 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
893 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
894 while (map < last_map) {
895 nonstatic_oop_maps->add(map->offset() + diff, map->count());
896 map++;
897 }
898 }
899
900 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
901 if (list != NULL) {
902 for (int i = 0; i < list->length(); i++) {
903 LayoutRawBlock* f = list->at(i);
904 if (f->kind() == LayoutRawBlock::INLINED) {
905 InlineKlass* vk = f->inline_klass();
906 assert(vk != NULL, "Should have been initialized");
907 if (vk->contains_oops()) {
908 add_inlined_field_oopmap(nonstatic_oop_maps, vk, f->offset());
909 }
910 }
911 }
912 }
913 }
914
915 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
916 if (group->oop_fields() != NULL) {
917 for (int i = 0; i < group->oop_fields()->length(); i++) {
918 LayoutRawBlock* b = group->oop_fields()->at(i);
919 nonstatic_oop_maps->add(b->offset(), 1);
920 }
921 }
922 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
923 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
924 }
925
// Final pass: builds the non-static oop maps (inherited entries first, then
// this class' own), computes the aligned instance/static sizes, and fills
// the FieldLayoutInfo structure consumed by InstanceKlass creation.
// Optionally prints the layouts when -XX:+PrintFieldLayout / PrintInlineLayout.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    // Oop maps inherited from the super class come first
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != NULL && cg->oop_fields()->at(0) != NULL, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Merge adjacent entries into as few oop map blocks as possible
  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inline_fields = _has_inline_type_fields;

  // An inline type is naturally atomic if it has just one field, and
  // that field is simple enough.
  _info->_is_naturally_atomic = (_is_inline_type &&
                                 (_atomic_field_count <= 1) &&
                                 !_has_nonatomic_values &&
                                 _contended_groups.is_empty());
  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.


  if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, NULL);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      tty->print_cr("First field offset = %d", _first_field_offset);
      tty->print_cr("Alignment = %d bytes", _alignment);
      tty->print_cr("Exact size = %d bytes", _exact_size_in_bytes);
    }
    tty->print_cr("---");
  }
}
992
993 void FieldLayoutBuilder::build_layout(TRAPS) {
994 if (_is_inline_type) {
995 compute_inline_class_layout(CHECK);
996 } else {
997 compute_regular_layout();
998 }
999 }
|