8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classFileParser.hpp"
27 #include "classfile/fieldLayoutBuilder.hpp"
28 #include "jvm.h"
29 #include "memory/resourceArea.hpp"
30 #include "oops/array.hpp"
31 #include "oops/fieldStreams.inline.hpp"
32 #include "oops/instanceMirrorKlass.hpp"
33 #include "oops/instanceKlass.inline.hpp"
34 #include "oops/klass.inline.hpp"
35 #include "runtime/fieldDescriptor.inline.hpp"
36
37
38 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
39 _next_block(nullptr),
40 _prev_block(nullptr),
41 _kind(kind),
42 _offset(-1),
43 _alignment(1),
44 _size(size),
45 _field_index(-1),
46 _is_reference(false) {
47 assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
48 "Otherwise, should use the constructor with a field index argument");
49 assert(size > 0, "Sanity check");
50 }
51
52
53 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
54 _next_block(nullptr),
55 _prev_block(nullptr),
56 _kind(kind),
57 _offset(-1),
58 _alignment(alignment),
59 _size(size),
60 _field_index(index),
61 _is_reference(is_reference) {
62 assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
63 "Other kind do not have a field index");
64 assert(size > 0, "Sanity check");
65 assert(alignment > 0, "Sanity check");
66 }
67
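// Returns true if a field of the given size and alignment can be stored in this block,
// taking into account the padding needed to align the block's start offset.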
68 bool LayoutRawBlock::fit(int size, int alignment) {
69 int adjustment = 0;
70 if ((_offset % alignment) != 0) {
71 adjustment = alignment - (_offset % alignment);
72 }
73 return _size >= size + adjustment;
74 }
75
76 FieldGroup::FieldGroup(int contended_group) :
77 _next(nullptr),
78 _primitive_fields(nullptr),
79 _oop_fields(nullptr),
80 _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
81 _oop_count(0) {}
82
83 void FieldGroup::add_primitive_field(int idx, BasicType type) {
84 int size = type2aelembytes(type);
85 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
86 if (_primitive_fields == nullptr) {
87 _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
88 }
89 _primitive_fields->append(block);
90 }
91
92 void FieldGroup::add_oop_field(int idx) {
93 int size = type2aelembytes(T_OBJECT);
94 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
95 if (_oop_fields == nullptr) {
96 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
97 }
98 _oop_fields->append(block);
99 _oop_count++;
100 }
101
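// Sorts the primitive fields of the group by decreasing size to limit the padding
// needed between consecutive fields.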
102 void FieldGroup::sort_by_size() {
103 if (_primitive_fields != nullptr) {
104 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
105 }
106 }
107
108 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
109 _field_info(field_info),
110 _cp(cp),
111 _blocks(nullptr),
112 _start(_blocks),
113 _last(_blocks) {}
114
115 void FieldLayout::initialize_static_layout() {
116 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
117 _blocks->set_offset(0);
118 _last = _blocks;
119 _start = _blocks;
120 // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
121 // during bootstrapping, the size of the java.lang.Class is still not known when the layout
122 // of static fields is computed. Field offsets are fixed later when the size is known
123 // (see java_lang_Class::fixup_mirror())
124 if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
125 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
126 _blocks->set_offset(0);
127 }
128 }
129
130 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
131 if (super_klass == nullptr) {
132 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
133 _blocks->set_offset(0);
134 _last = _blocks;
135 _start = _blocks;
136 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
137 } else {
138 bool has_fields = reconstruct_layout(super_klass);
139 fill_holes(super_klass);
140 if (!super_klass->has_contended_annotations() || !has_fields) {
141 _start = _blocks; // start allocating fields from the first empty block
142 } else {
143 _start = _last; // append fields at the end of the reconstructed layout
144 }
145 }
146 }
147
148 LayoutRawBlock* FieldLayout::first_field_block() {
149 LayoutRawBlock* block = _start;
150 while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
151 && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
152 block = block->next_block();
153 }
154 return block;
155 }
156
157
158 // Insert a set of fields into a layout using a best-fit strategy.
159 // For each field, search for the smallest empty slot able to fit the field
160 // (satisfying both size and alignment requirements); if none is found,
161 // add the field at the end of the layout.
162 // Fields cannot be inserted before the block specified in the "start" argument.
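// For example, if the layout contains suitably aligned empty slots of 2, 8, and 4 bytes,
// a 4-byte field is placed in the 4-byte slot, the smallest one able to hold it.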
163 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
164 if (list == nullptr) return;
165 if (start == nullptr) start = this->_start;
166 bool last_search_success = false;
167 int last_size = 0;
168 int last_alignment = 0;
169 for (int i = 0; i < list->length(); i ++) {
170 LayoutRawBlock* b = list->at(i);
171 LayoutRawBlock* cursor = nullptr;
172 LayoutRawBlock* candidate = nullptr;
173
174 // if start is the last block, just append the field
175 if (start == last_block()) {
176 candidate = last_block();
177 }
178 // Before iterating over the layout to find an empty slot fitting the field's requirements,
179 // check if the previous field had the same requirements and if the search for a fitting slot
180 // was successful. If the requirements were the same but the search failed, a new search will
181 // fail the same way, so just append the field at the end of the layout.
182 else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
183 candidate = last_block();
184 } else {
185 // Iterate over the layout to find an empty slot fitting the field's requirements
186 last_size = b->size();
187 last_alignment = b->alignment();
188 cursor = last_block()->prev_block();
189 assert(cursor != nullptr, "Sanity check");
190 last_search_success = true;
191 while (cursor != start) {
192 if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
193 if (candidate == nullptr || cursor->size() < candidate->size()) {
194 candidate = cursor;
195 }
196 }
197 cursor = cursor->prev_block();
198 }
199 if (candidate == nullptr) {
200 candidate = last_block();
201 last_search_success = false;
202 }
203 assert(candidate != nullptr, "Candidate must not be null");
204 assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
205 assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
206 }
207
208 insert_field_block(candidate, b);
209 }
210 }
211
212 // Used for classes with hard-coded field offsets: insert a field at the specified offset.
213 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
214 assert(block != nullptr, "Sanity check");
215 block->set_offset(offset);
216 if (start == nullptr) {
217 start = this->_start;
218 }
219 LayoutRawBlock* slot = start;
220 while (slot != nullptr) {
221 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
222 slot == _last){
223 assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
224 assert(slot->size() >= block->offset() - slot->offset() + block->size(), "Matching slot must be big enough");
225 if (slot->offset() < block->offset()) {
226 int adjustment = block->offset() - slot->offset();
227 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
228 insert(slot, adj);
229 }
230 insert(slot, block);
231 if (slot->size() == 0) {
232 remove(slot);
233 }
234 _field_info->adr_at(block->field_index())->set_offset(block->offset());
235 return;
236 }
237 slot = slot->next_block();
238 }
239 fatal("Should have found a matching slot above, corrupted layout or invalid offset");
240 }
241
242 // The allocation logic uses a best fit strategy: the set of fields is allocated
243 // in the first empty slot big enough to contain the whole set (including padding
244 // to fit alignment constraints).
245 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
246 if (list == nullptr) return;
247 if (start == nullptr) {
248 start = _start;
249 }
250 // This code assumes that if the first block is well aligned, the following
251 // blocks would naturally be well aligned (no need for adjustment)
252 int size = 0;
253 for (int i = 0; i < list->length(); i++) {
254 size += list->at(i)->size();
255 }
256
257 LayoutRawBlock* candidate = nullptr;
258 if (start == last_block()) {
259 candidate = last_block();
260 } else {
261 LayoutRawBlock* first = list->at(0);
262 candidate = last_block()->prev_block();
263 while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
264 if (candidate == start) {
265 candidate = last_block();
266 break;
267 }
268 candidate = candidate->prev_block();
269 }
270 assert(candidate != nullptr, "Candidate must not be null");
271 assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
272 assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
273 }
274
275 for (int i = 0; i < list->length(); i++) {
276 LayoutRawBlock* b = list->at(i);
277 insert_field_block(candidate, b);
278 assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
279 }
280 }
281
282 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
283 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
284 if (slot->offset() % block->alignment() != 0) {
285 int adjustment = block->alignment() - (slot->offset() % block->alignment());
286 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
287 insert(slot, adj);
288 }
289 insert(slot, block);
290 if (slot->size() == 0) {
291 remove(slot);
292 }
293 _field_info->adr_at(block->field_index())->set_offset(block->offset());
294 return block;
295 }
296
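// Rebuilds the layout of the fields inherited from the super classes: each inherited
// instance field becomes an INHERITED block, and the blocks are chained in increasing
// offset order after an initial RESERVED block covering the object header.
// Returns true if at least one inherited instance field was found.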
297 bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
298 bool has_instance_fields = false;
299 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
300 while (ik != nullptr) {
301 for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
302 BasicType type = Signature::basic_type(fs.signature());
303 // distinction between static and non-static fields is missing
304 if (fs.access_flags().is_static()) continue;
305 has_instance_fields = true;
306 int size = type2aelembytes(type);
307 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
308 LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
309 block->set_offset(fs.offset());
310 all_fields->append(block);
311 }
312 ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
313 }
314
315 all_fields->sort(LayoutRawBlock::compare_offset);
316 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
317 _blocks->set_offset(0);
318 _last = _blocks;
319
320 for(int i = 0; i < all_fields->length(); i++) {
321 LayoutRawBlock* b = all_fields->at(i);
322 _last->set_next_block(b);
323 b->set_prev_block(_last);
324 _last = b;
325 }
326 _start = _blocks;
327 return has_instance_fields;
328 }
329
330 // Called during the reconstruction of a layout, after fields from super
331 // classes have been inserted. It fills unused slots between inserted fields
332 // with EMPTY blocks, so that the regular field insertion methods work.
333 // This method handles classes with @Contended annotations differently
334 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
335 // fields from interfering with contended fields/classes.
336 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
337 assert(_blocks != nullptr, "Sanity check");
338 assert(_blocks->offset() == 0, "first block must be at offset zero");
339 LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
340 LayoutRawBlock* b = _blocks;
341 while (b->next_block() != nullptr) {
342 if (b->next_block()->offset() > (b->offset() + b->size())) {
343 int size = b->next_block()->offset() - (b->offset() + b->size());
344 LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
345 empty->set_offset(b->offset() + b->size());
346 empty->set_next_block(b->next_block());
347 b->next_block()->set_prev_block(empty);
348 b->set_next_block(empty);
349 empty->set_prev_block(b);
350 }
351 b = b->next_block();
352 }
353 assert(b->next_block() == nullptr, "Invariant at this point");
354 assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
355
356 // If the super class has @Contended annotation, a padding block is
357 // inserted at the end to ensure that fields from the subclasses won't share
358 // the cache line of the last field of the contended class
359 if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
360 LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
361 p->set_offset(b->offset() + b->size());
362 b->set_next_block(p);
363 p->set_prev_block(b);
364 b = p;
365 }
366
367 LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
368 last->set_offset(b->offset() + b->size());
369 assert(last->offset() > 0, "Sanity check");
370 b->set_next_block(last);
371 last->set_prev_block(b);
372 _last = last;
373 }
374
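// Inserts 'block' at the current offset of the empty 'slot' and shrinks the slot
// accordingly; callers remove the slot if its size drops to zero.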
375 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
376 assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
377 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
378 block->set_offset(slot->offset());
379 slot->set_offset(slot->offset() + block->size());
380 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
381 assert(slot->size() - block->size() >= 0, "no negative size allowed");
382 slot->set_size(slot->size() - block->size());
383 block->set_prev_block(slot->prev_block());
384 block->set_next_block(slot);
385 slot->set_prev_block(block);
386 if (block->prev_block() != nullptr) {
387 block->prev_block()->set_next_block(block);
388 }
389 if (_blocks == slot) {
390 _blocks = block;
391 }
392 return block;
393 }
394
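// Unlinks 'block' from the doubly linked list of blocks; the trailing empty block
// (_last) is never removed.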
395 void FieldLayout::remove(LayoutRawBlock* block) {
396 assert(block != nullptr, "Sanity check");
397 assert(block != _last, "Sanity check");
398 if (_blocks == block) {
399 _blocks = block->next_block();
400 if (_blocks != nullptr) {
401 _blocks->set_prev_block(nullptr);
402 }
403 } else {
404 assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
405 block->prev_block()->set_next_block(block->next_block());
406 block->next_block()->set_prev_block(block->prev_block());
407 }
408 if (block == _start) {
409 _start = block->prev_block();
410 }
411 }
412
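// Prints one line per block of the layout: offset, field name and signature when
// available, size/alignment, and the block kind.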
413 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
414 ResourceMark rm;
415 LayoutRawBlock* b = _blocks;
416 while(b != _last) {
417 switch(b->kind()) {
418 case LayoutRawBlock::REGULAR: {
419 FieldInfo* fi = _field_info->adr_at(b->field_index());
420 output->print_cr(" @%d \"%s\" %s %d/%d %s",
421 b->offset(),
422 fi->name(_cp)->as_C_string(),
423 fi->signature(_cp)->as_C_string(),
424 b->size(),
425 b->alignment(),
426 "REGULAR");
427 break;
428 }
429 case LayoutRawBlock::FLATTENED: {
430 FieldInfo* fi = _field_info->adr_at(b->field_index());
431 output->print_cr(" @%d \"%s\" %s %d/%d %s",
432 b->offset(),
433 fi->name(_cp)->as_C_string(),
434 fi->signature(_cp)->as_C_string(),
435 b->size(),
436 b->alignment(),
437 "FLATTENED");
438 break;
439 }
440 case LayoutRawBlock::RESERVED: {
441 output->print_cr(" @%d %d/- %s",
442 b->offset(),
443 b->size(),
444 "RESERVED");
445 break;
446 }
447 case LayoutRawBlock::INHERITED: {
448 assert(!is_static, "Static fields are not inherited in layouts");
449 assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
450 bool found = false;
451 const InstanceKlass* ik = super;
452 while (!found && ik != nullptr) {
453 for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
454 if (fs.offset() == b->offset()) {
455 output->print_cr(" @%d \"%s\" %s %d/%d %s",
456 b->offset(),
457 fs.name()->as_C_string(),
458 fs.signature()->as_C_string(),
459 b->size(),
460 b->size(), // so far, alignment constraint == size, will change with Valhalla
461 "INHERITED");
462 found = true;
463 break;
464 }
465 }
466 ik = ik->java_super();
467 }
468 break;
469 }
470 case LayoutRawBlock::EMPTY:
471 output->print_cr(" @%d %d/1 %s",
472 b->offset(),
473 b->size(),
474 "EMPTY");
475 break;
476 case LayoutRawBlock::PADDING:
477 output->print_cr(" @%d %d/1 %s",
478 b->offset(),
479 b->size(),
480 "PADDING");
481 break;
482 }
483 b = b->next_block();
484 }
485 }
486
487 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
488 GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :
489 _classname(classname),
490 _super_klass(super_klass),
491 _constant_pool(constant_pool),
492 _field_info(field_info),
493 _info(info),
494 _root_group(nullptr),
495 _contended_groups(GrowableArray<FieldGroup*>(8)),
496 _static_fields(nullptr),
497 _layout(nullptr),
498 _static_layout(nullptr),
499 _nonstatic_oopmap_count(0),
500 _alignment(-1),
501 _has_nonstatic_fields(false),
502 _is_contended(is_contended) {}
503
504
505 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
506 assert(g > 0, "must only be called for named contended groups");
507 FieldGroup* fg = nullptr;
508 for (int i = 0; i < _contended_groups.length(); i++) {
509 fg = _contended_groups.at(i);
510 if (fg->contended_group() == g) return fg;
511 }
512 fg = new FieldGroup(g);
513 _contended_groups.append(fg);
514 return fg;
515 }
516
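// Creates the instance and static layouts and the field groups used during field sorting.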
517 void FieldLayoutBuilder::prologue() {
518 _layout = new FieldLayout(_field_info, _constant_pool);
519 const InstanceKlass* super_klass = _super_klass;
520 _layout->initialize_instance_layout(super_klass);
521 if (super_klass != nullptr) {
522 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
523 }
524 _static_layout = new FieldLayout(_field_info, _constant_pool);
525 _static_layout->initialize_static_layout();
526 _static_fields = new FieldGroup();
527 _root_group = new FieldGroup();
528 }
529
530 // Field sorting for regular classes:
531 // - fields are sorted into static and non-static fields
532 // - non-static fields are also sorted according to their contention group
533 // (support of the @Contended annotation)
534 // - @Contended annotation is ignored for static fields
535 void FieldLayoutBuilder::regular_field_sorting() {
536 int idx = 0;
537 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
539 FieldGroup* group = nullptr;
540 FieldInfo fieldinfo = *it;
541 if (fieldinfo.access_flags().is_static()) {
542 group = _static_fields;
543 } else {
544 _has_nonstatic_fields = true;
545 if (fieldinfo.field_flags().is_contended()) {
546 int g = fieldinfo.contended_group();
547 if (g == 0) {
548 group = new FieldGroup(true);
549 _contended_groups.append(group);
550 } else {
551 group = get_or_create_contended_group(g);
552 }
553 } else {
554 group = _root_group;
555 }
556 }
557 assert(group != nullptr, "invariant");
558 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
559 switch(type) {
560 case T_BYTE:
561 case T_CHAR:
562 case T_DOUBLE:
563 case T_FLOAT:
564 case T_INT:
565 case T_LONG:
566 case T_SHORT:
567 case T_BOOLEAN:
568 group->add_primitive_field(idx, type);
569 break;
570 case T_OBJECT:
571 case T_ARRAY:
572 if (group != _static_fields) _nonstatic_oopmap_count++;
573 group->add_oop_field(idx);
574 break;
575 default:
576 fatal("Something wrong?");
577 }
578 }
579 _root_group->sort_by_size();
580 _static_fields->sort_by_size();
581 if (!_contended_groups.is_empty()) {
582 for (int i = 0; i < _contended_groups.length(); i++) {
583 _contended_groups.at(i)->sort_by_size();
584 }
585 }
586 }
587
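// Inserts ContendedPaddingWidth bytes of padding in the given empty slot
// (no-op when contended padding is disabled).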
588 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
589 if (ContendedPaddingWidth > 0) {
590 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
591 _layout->insert(slot, padding);
592 }
593 }
594
595 // Computation of regular classes layout is an evolution of the previous default layout
596 // (FieldAllocationStyle 1):
597 // - primitive fields are allocated first (from the biggest to the smallest)
598 // - then oop fields are allocated, either in existing gaps or at the end of
599 // the layout
600 void FieldLayoutBuilder::compute_regular_layout() {
601 bool need_tail_padding = false;
602 prologue();
603 regular_field_sorting();
604
605 if (_is_contended) {
606 _layout->set_start(_layout->last_block());
607 // insertion is currently easy because the current strategy doesn't try to fill holes
608 // in super classes' layouts, so the _start block is consequently the last block
609 insert_contended_padding(_layout->start());
610 need_tail_padding = true;
611 }
612 _layout->add(_root_group->primitive_fields());
613 _layout->add(_root_group->oop_fields());
614
615 if (!_contended_groups.is_empty()) {
616 for (int i = 0; i < _contended_groups.length(); i++) {
617 FieldGroup* cg = _contended_groups.at(i);
618 LayoutRawBlock* start = _layout->last_block();
619 insert_contended_padding(start);
620 _layout->add(cg->primitive_fields(), start);
621 _layout->add(cg->oop_fields(), start);
622 need_tail_padding = true;
623 }
624 }
625
626 if (need_tail_padding) {
627 insert_contended_padding(_layout->last_block());
628 }
629
630 _static_layout->add_contiguously(this->_static_fields->oop_fields());
631 _static_layout->add(this->_static_fields->primitive_fields());
632
633 epilogue();
634 }
635
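// Finalizes the layout computation: builds the non-static oop maps, computes the
// instance size and static field size, and passes the results back through _info.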
636 void FieldLayoutBuilder::epilogue() {
637 // Computing oopmaps
638 int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
639 int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
640
641 OopMapBlocksBuilder* nonstatic_oop_maps =
642 new OopMapBlocksBuilder(max_oop_map_count);
643 if (super_oop_map_count > 0) {
644 nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
645 _super_klass->nonstatic_oop_map_count());
646 }
647
648 if (_root_group->oop_fields() != nullptr) {
649 for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
650 LayoutRawBlock* b = _root_group->oop_fields()->at(i);
651 nonstatic_oop_maps->add(b->offset(), 1);
652 }
653 }
654
655 if (!_contended_groups.is_empty()) {
656 for (int i = 0; i < _contended_groups.length(); i++) {
657 FieldGroup* cg = _contended_groups.at(i);
658 if (cg->oop_count() > 0) {
659 assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
660 nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
661 }
662 }
663 }
664
665 nonstatic_oop_maps->compact();
666
667 int instance_end = align_up(_layout->last_block()->offset(), wordSize);
668 int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
669 int static_fields_size = (static_fields_end -
670 InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
671 int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
672
673 // Pass back information needed for InstanceKlass creation
674
675 _info->oop_map_blocks = nonstatic_oop_maps;
676 _info->_instance_size = align_object_size(instance_end / wordSize);
677 _info->_static_field_size = static_fields_size;
678 _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
679 _info->_has_nonstatic_fields = _has_nonstatic_fields;
680
681 if (PrintFieldLayout) {
682 ResourceMark rm;
683 tty->print_cr("Layout of class %s", _classname->as_C_string());
684 tty->print_cr("Instance fields:");
685 _layout->print(tty, false, _super_klass);
686 tty->print_cr("Static fields:");
687 _static_layout->print(tty, true, nullptr);
688 tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
689 tty->print_cr("---");
690 }
691 }
692
693 void FieldLayoutBuilder::build_layout() {
694 compute_regular_layout();
695 }
|
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/classFileParser.hpp"
27 #include "classfile/fieldLayoutBuilder.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "jvm.h"
31 #include "memory/resourceArea.hpp"
32 #include "oops/array.hpp"
33 #include "oops/fieldStreams.inline.hpp"
34 #include "oops/instanceMirrorKlass.hpp"
35 #include "oops/instanceKlass.inline.hpp"
36 #include "oops/klass.inline.hpp"
37 #include "oops/inlineKlass.inline.hpp"
38 #include "runtime/fieldDescriptor.inline.hpp"
39 #include "utilities/powerOfTwo.hpp"
40
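// Selects the layout used for a field declared with a value class type: a flat layout
// is chosen when the field's type is a known value class and a layout compatible with
// the required atomicity and nullability is available, otherwise the field is laid out
// as a reference.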
41 static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
42 bool use_atomic_flat) {
43
44 if (field_info.field_flags().is_injected()) {
45 // don't flatten injected fields
46 return LayoutKind::REFERENCE;
47 }
48
49 if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
50 // field's type is not a known value class, so use a reference
51 return LayoutKind::REFERENCE;
52 }
53
54 InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
55 InlineKlass* vk = inline_field_info->klass();
56
57 if (field_info.field_flags().is_null_free_inline_type()) {
58 assert(vk->is_implicitly_constructible(), "null-free fields must be implicitly constructible");
59 if (vk->must_be_atomic() || field_info.access_flags().is_volatile() || AlwaysAtomicAccesses) {
60 if (vk->is_naturally_atomic() && vk->has_non_atomic_layout()) return LayoutKind::NON_ATOMIC_FLAT;
61 return (vk->has_atomic_layout() && use_atomic_flat) ? LayoutKind::ATOMIC_FLAT : LayoutKind::REFERENCE;
62 } else {
63 return vk->has_non_atomic_layout() ? LayoutKind::NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
64 }
65 } else {
66 if (NullableFieldFlattening && vk->has_nullable_layout()) {
67 return use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
68 } else {
69 return LayoutKind::REFERENCE;
70 }
71 }
72 }
73
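// Returns, through 'size' and 'alignment', the dimensions of the flat layout 'kind'
// provided by the inline klass 'vk'.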
74 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
75 switch(kind) {
76 case LayoutKind::NON_ATOMIC_FLAT:
77 *size = vk->non_atomic_size_in_bytes();
78 *alignment = vk->non_atomic_alignment();
79 break;
80 case LayoutKind::ATOMIC_FLAT:
81 *size = vk->atomic_size_in_bytes();
82 *alignment = *size;
83 break;
84 case LayoutKind::NULLABLE_ATOMIC_FLAT:
85 *size = vk->nullable_size_in_bytes();
86 *alignment = *size;
87 break;
88 default:
89 ShouldNotReachHere();
90 }
91 }
92
93 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
94 _next_block(nullptr),
95 _prev_block(nullptr),
96 _inline_klass(nullptr),
97 _block_kind(kind),
98 _offset(-1),
99 _alignment(1),
100 _size(size),
101 _field_index(-1) {
102 assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
103 "Otherwise, should use the constructor with a field index argument");
104 assert(size > 0, "Sanity check");
105 }
106
107
108 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
109 _next_block(nullptr),
110 _prev_block(nullptr),
111 _inline_klass(nullptr),
112 _block_kind(kind),
113 _offset(-1),
114 _alignment(alignment),
115 _size(size),
116 _field_index(index) {
117 assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
118 "Other kind do not have a field index");
119 assert(size > 0, "Sanity check");
120 assert(alignment > 0, "Sanity check");
121 }
122
123 bool LayoutRawBlock::fit(int size, int alignment) {
124 int adjustment = 0;
125 if ((_offset % alignment) != 0) {
126 adjustment = alignment - (_offset % alignment);
127 }
128 return _size >= size + adjustment;
129 }
130
131 FieldGroup::FieldGroup(int contended_group) :
132 _next(nullptr),
133 _small_primitive_fields(nullptr),
134 _big_primitive_fields(nullptr),
135 _oop_fields(nullptr),
136 _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
137 _oop_count(0) {}
138
139 void FieldGroup::add_primitive_field(int idx, BasicType type) {
140 int size = type2aelembytes(type);
141 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
142 if (size >= oopSize) {
143 add_to_big_primitive_list(block);
144 } else {
145 add_to_small_primitive_list(block);
146 }
147 }
148
149 void FieldGroup::add_oop_field(int idx) {
150 int size = type2aelembytes(T_OBJECT);
151 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
152 if (_oop_fields == nullptr) {
153 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
154 }
155 _oop_fields->append(block);
156 _oop_count++;
157 }
158
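// Adds a flat (flattened value class) field; like primitive fields, it is queued in the
// big or small primitive list depending on whether its size reaches oopSize.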
159 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
160 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
161 block->set_inline_klass(vk);
162 block->set_layout_kind(lk);
163 if (block->size() >= oopSize) {
164 add_to_big_primitive_list(block);
165 } else {
166 add_to_small_primitive_list(block);
167 }
168 }
169
170 void FieldGroup::sort_by_size() {
171 if (_small_primitive_fields != nullptr) {
172 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
173 }
174 if (_big_primitive_fields != nullptr) {
175 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
176 }
177 }
178
179 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
180 if (_small_primitive_fields == nullptr) {
181 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
182 }
183 _small_primitive_fields->append(block);
184 }
185
186 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
187 if (_big_primitive_fields == nullptr) {
188 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
189 }
190 _big_primitive_fields->append(block);
191 }
192
193 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
194 _field_info(field_info),
195 _inline_layout_info_array(inline_layout_info_array),
196 _cp(cp),
197 _blocks(nullptr),
198 _start(_blocks),
199 _last(_blocks),
200 _super_first_field_offset(-1),
201 _super_alignment(-1),
202 _super_min_align_required(-1),
203 _default_value_offset(-1),
204 _null_reset_value_offset(-1),
205 _super_has_fields(false),
206 _has_inherited_fields(false) {}
207
208 void FieldLayout::initialize_static_layout() {
209 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
210 _blocks->set_offset(0);
211 _last = _blocks;
212 _start = _blocks;
213 // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
214 // during bootstrapping, the size of the java.lang.Class is still not known when the layout
215 // of static fields is computed. Field offsets are fixed later when the size is known
216 // (see java_lang_Class::fixup_mirror())
217 if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
218 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
219 _blocks->set_offset(0);
220 }
221 }
222
223 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
224 if (super_klass == nullptr) {
225 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
226 _blocks->set_offset(0);
227 _last = _blocks;
228 _start = _blocks;
229 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
230 } else {
231 _super_has_fields = reconstruct_layout(super_klass);
232 fill_holes(super_klass);
233 if ((!super_klass->has_contended_annotations()) || !_super_has_fields) {
234 _start = _blocks; // start allocating fields from the first empty block
235 } else {
236 _start = _last; // append fields at the end of the reconstructed layout
237 }
238 }
239 }
240
241 LayoutRawBlock* FieldLayout::first_field_block() {
242 LayoutRawBlock* block = _blocks;
243 while (block != nullptr
244 && block->block_kind() != LayoutRawBlock::INHERITED
245 && block->block_kind() != LayoutRawBlock::REGULAR
246 && block->block_kind() != LayoutRawBlock::FLAT
247 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
248 block = block->next_block();
249 }
250 return block;
251 }
252
253 // Insert a set of fields into a layout.
254 // For each field, search for an empty slot able to fit the field
255 // (satisfying both size and alignment requirements); if none is found,
256 // add the field at the end of the layout.
257 // Fields cannot be inserted before the block specified in the "start" argument.
258 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
259 if (list == nullptr) return;
260 if (start == nullptr) start = this->_start;
261 bool last_search_success = false;
262 int last_size = 0;
263 int last_alignment = 0;
264 for (int i = 0; i < list->length(); i ++) {
265 LayoutRawBlock* b = list->at(i);
266 LayoutRawBlock* cursor = nullptr;
267 LayoutRawBlock* candidate = nullptr;
268 // if start is the last block, just append the field
269 if (start == last_block()) {
270 candidate = last_block();
271 }
272 // Before iterating over the layout to find an empty slot fitting the field's requirements,
273 // check if the previous field had the same requirements and if the search for a fitting slot
274 // was successful. If the requirements were the same but the search failed, a new search will
275 // fail the same way, so just append the field at the end of the layout.
276 else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
277 candidate = last_block();
278 } else {
279 // Iterate over the layout to find an empty slot fitting the field's requirements
280 last_size = b->size();
281 last_alignment = b->alignment();
282 cursor = last_block()->prev_block();
283 assert(cursor != nullptr, "Sanity check");
284 last_search_success = true;
285
286 while (cursor != start) {
287 if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
288 if (candidate == nullptr || cursor->size() < candidate->size()) {
289 candidate = cursor;
290 }
291 }
292 cursor = cursor->prev_block();
293 }
294 if (candidate == nullptr) {
295 candidate = last_block();
296 last_search_success = false;
297 }
298 assert(candidate != nullptr, "Candidate must not be null");
299 assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
300 assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
301 }
302 insert_field_block(candidate, b);
303 }
304 }
305
306 // Used for classes with hard-coded field offsets: insert a field at the specified offset.
307 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
308 assert(block != nullptr, "Sanity check");
309 block->set_offset(offset);
310 if (start == nullptr) {
311 start = this->_start;
312 }
313 LayoutRawBlock* slot = start;
314 while (slot != nullptr) {
315 if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
316 slot == _last){
317 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
318 assert(slot->size() >= block->offset() - slot->offset() + block->size(), "Matching slot must be big enough");
319 if (slot->offset() < block->offset()) {
320 int adjustment = block->offset() - slot->offset();
321 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
322 insert(slot, adj);
323 }
324 insert(slot, block);
325 if (slot->size() == 0) {
326 remove(slot);
327 }
328 if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
329 _field_info->adr_at(block->field_index())->set_offset(block->offset());
330 }
331 return;
332 }
333 slot = slot->next_block();
334 }
335 fatal("Should have found a matching slot above, corrupted layout or invalid offset");
336 }
337
338 // The allocation logic uses a best fit strategy: the set of fields is allocated
339 // in the first empty slot big enough to contain the whole set (including padding
340 // to fit alignment constraints).
341 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
342 if (list == nullptr) return;
343 if (start == nullptr) {
344 start = _start;
345 }
346 // This code assumes that if the first block is well aligned, the following
347 // blocks would naturally be well aligned (no need for adjustment)
348 int size = 0;
349 for (int i = 0; i < list->length(); i++) {
350 size += list->at(i)->size();
351 }
352
353 LayoutRawBlock* candidate = nullptr;
354 if (start == last_block()) {
355 candidate = last_block();
356 } else {
357 LayoutRawBlock* first = list->at(0);
358 candidate = last_block()->prev_block();
359 while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
360 if (candidate == start) {
361 candidate = last_block();
362 break;
363 }
364 candidate = candidate->prev_block();
365 }
366 assert(candidate != nullptr, "Candidate must not be null");
367 assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
368 assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
369 }
370
371 for (int i = 0; i < list->length(); i++) {
372 LayoutRawBlock* b = list->at(i);
373 insert_field_block(candidate, b);
374 assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
375 }
376 }
377
378 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
379 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
380 if (slot->offset() % block->alignment() != 0) {
381 int adjustment = block->alignment() - (slot->offset() % block->alignment());
382 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
383 insert(slot, adj);
384 }
385 assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
386 insert(slot, block);
387 if (slot->size() == 0) {
388 remove(slot);
389 }
390 // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
391 if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
392 _field_info->adr_at(block->field_index())->set_offset(block->offset());
393 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::default_value_name()) {
394 _default_value_offset = block->offset();
395 }
396 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
397 _null_reset_value_offset = block->offset();
398 }
399 }
400 if (block->block_kind() == LayoutRawBlock::FLAT && block->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
401 int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->first_field_offset() + block->offset();
402 _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
403 _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
404 }
405
406 return block;
407 }
408
409 bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
410 bool has_instance_fields = false;
411 if (ik->is_abstract() && !ik->is_identity_class()) {
412 _super_alignment = type2aelembytes(BasicType::T_LONG);
413 }
414 GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
415 while (ik != nullptr) {
416 for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
417 BasicType type = Signature::basic_type(fs.signature());
418 // distinction between static and non-static fields is missing
419 if (fs.access_flags().is_static()) continue;
420 has_instance_fields = true;
421 _has_inherited_fields = true;
422 if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) _super_first_field_offset = fs.offset();
423 LayoutRawBlock* block;
424 if (fs.is_flat()) {
425 InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
426 InlineKlass* vk = layout_info.klass();
427 block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
428 vk->layout_size_in_bytes(layout_info.kind()),
429 vk->layout_alignment(layout_info.kind()));
430 assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
431 _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
432 } else {
433 int size = type2aelembytes(type);
434 // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
435 block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
436 // For primitive types, the alignment is equal to the size
437 assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
438 _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
439 }
440 block->set_offset(fs.offset());
441 all_fields->append(block);
442 }
443 ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
444 }
445 all_fields->sort(LayoutRawBlock::compare_offset);
446 _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
447 _blocks->set_offset(0);
448 _last = _blocks;
449 for(int i = 0; i < all_fields->length(); i++) {
450 LayoutRawBlock* b = all_fields->at(i);
451 _last->set_next_block(b);
452 b->set_prev_block(_last);
453 _last = b;
454 }
455 _start = _blocks;
456 return has_instance_fields;
457 }
458
459 // Called during the reconstruction of a layout, after fields from super
460 // classes have been inserted. It fills unused slots between inserted fields
461 // with EMPTY blocks, so that the regular field insertion methods work.
462 // This method handles classes with @Contended annotations differently
463 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
464 // fields from interfering with contended fields/classes.
465 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
466 assert(_blocks != nullptr, "Sanity check");
467 assert(_blocks->offset() == 0, "first block must be at offset zero");
468 LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
469 LayoutRawBlock* b = _blocks;
470 while (b->next_block() != nullptr) {
471 if (b->next_block()->offset() > (b->offset() + b->size())) {
472 int size = b->next_block()->offset() - (b->offset() + b->size());
473 // FIXME it would be better if the initial empty block were tagged as PADDING for value classes
474 LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
475 empty->set_offset(b->offset() + b->size());
476 empty->set_next_block(b->next_block());
477 b->next_block()->set_prev_block(empty);
478 b->set_next_block(empty);
479 empty->set_prev_block(b);
480 }
481 b = b->next_block();
482 }
483 assert(b->next_block() == nullptr, "Invariant at this point");
484 assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
485 // If the super class has @Contended annotation, a padding block is
486 // inserted at the end to ensure that fields from the subclasses won't share
487 // the cache line of the last field of the contended class
488 if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
489 LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
490 p->set_offset(b->offset() + b->size());
491 b->set_next_block(p);
492 p->set_prev_block(b);
493 b = p;
494 }
495
496 LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
497 last->set_offset(b->offset() + b->size());
498 assert(last->offset() > 0, "Sanity check");
499 b->set_next_block(last);
500 last->set_prev_block(b);
501 _last = last;
502 }
503
504 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
505 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
506 assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
507 block->set_offset(slot->offset());
508 slot->set_offset(slot->offset() + block->size());
509 assert((slot->size() - block->size()) < slot->size(), "underflow checking");
510 assert(slot->size() - block->size() >= 0, "no negative size allowed");
511 slot->set_size(slot->size() - block->size());
512 block->set_prev_block(slot->prev_block());
513 block->set_next_block(slot);
514 slot->set_prev_block(block);
515 if (block->prev_block() != nullptr) {
516 block->prev_block()->set_next_block(block);
517 }
518 if (_blocks == slot) {
519 _blocks = block;
520 }
521 if (_start == slot) {
522 _start = block;
523 }
524 return block;
525 }
526
527 void FieldLayout::remove(LayoutRawBlock* block) {
528 assert(block != nullptr, "Sanity check");
529 assert(block != _last, "Sanity check");
530 if (_blocks == block) {
531 _blocks = block->next_block();
532 if (_blocks != nullptr) {
533 _blocks->set_prev_block(nullptr);
534 }
535 } else {
536 assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
537 block->prev_block()->set_next_block(block->next_block());
538 block->next_block()->set_prev_block(block->prev_block());
539 }
540 if (block == _start) {
541 _start = block->prev_block();
542 }
543 }
544
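// Shifts all field blocks of the layout by 'shift' bytes, either by growing the empty
// block preceding the first field or by inserting a PADDING block, and updates the
// recorded field offsets and null marker offsets accordingly.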
545 void FieldLayout::shift_fields(int shift) {
546 LayoutRawBlock* b = first_field_block();
547 LayoutRawBlock* previous = b->prev_block();
548 if (previous->block_kind() == LayoutRawBlock::EMPTY) {
549 previous->set_size(previous->size() + shift);
550 } else {
551 LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
552 nb->set_offset(b->offset());
553 previous->set_next_block(nb);
554 nb->set_prev_block(previous);
555 b->set_prev_block(nb);
556 nb->set_next_block(b);
557 }
558 while (b != nullptr) {
559 b->set_offset(b->offset() + shift);
560 if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
561 _field_info->adr_at(b->field_index())->set_offset(b->offset());
562 if (b->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
563 int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
564 _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
565 _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
566
567 }
568 }
569 assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
570 b = b->next_block();
571 }
572 }
573
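// Returns the NULL_MARKER block of the layout; the layout is expected to contain one.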
574 LayoutRawBlock* FieldLayout::find_null_marker() {
575 LayoutRawBlock* b = _blocks;
576 while (b != nullptr) {
577 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
578 return b;
579 }
580 b = b->next_block();
581 }
582 ShouldNotReachHere();
583 }
584
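// Removes the NULL_MARKER block from the layout, merging it into the following EMPTY
// block when possible, or turning it into an EMPTY block otherwise.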
585 void FieldLayout::remove_null_marker() {
586 LayoutRawBlock* b = first_field_block();
587 while (b != nullptr) {
588 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
589 if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
590 LayoutRawBlock* n = b->next_block();
591 remove(b);
592 n->set_offset(b->offset());
593 n->set_size(n->size() + b->size());
594 } else {
595 b->set_block_kind(LayoutRawBlock::EMPTY);
596 }
597 return;
598 }
599 b = b->next_block();
600 }
601 ShouldNotReachHere(); // if we reach this point, the null marker was not found!
602 }
603
604 static const char* layout_kind_to_string(LayoutKind lk) {
605 switch(lk) {
606 case LayoutKind::REFERENCE:
607 return "REFERENCE";
608 case LayoutKind::NON_ATOMIC_FLAT:
609 return "NON_ATOMIC_FLAT";
610 case LayoutKind::ATOMIC_FLAT:
611 return "ATOMIC_FLAT";
612 case LayoutKind::NULLABLE_ATOMIC_FLAT:
613 return "NULLABLE_ATOMIC_FLAT";
614 case LayoutKind::UNKNOWN:
615 return "UNKNOWN";
616 default:
617 ShouldNotReachHere();
618 }
619 }
620
621 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields) {
622 ResourceMark rm;
623 LayoutRawBlock* b = _blocks;
624 while(b != _last) {
625 switch(b->block_kind()) {
626 case LayoutRawBlock::REGULAR: {
627 FieldInfo* fi = _field_info->adr_at(b->field_index());
628 output->print_cr(" @%d %s %d/%d \"%s\" %s",
629 b->offset(),
630 "REGULAR",
631 b->size(),
632 b->alignment(),
633 fi->name(_cp)->as_C_string(),
634 fi->signature(_cp)->as_C_string());
635 break;
636 }
637 case LayoutRawBlock::FLAT: {
638 FieldInfo* fi = _field_info->adr_at(b->field_index());
639 InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
640 assert(ik != nullptr, "");
641 output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
642 b->offset(),
643 "FLAT",
644 b->size(),
645 b->alignment(),
646 fi->name(_cp)->as_C_string(),
647 fi->signature(_cp)->as_C_string(),
648 ik->name()->as_C_string(),
649 ik->class_loader_data(), layout_kind_to_string(b->layout_kind()));
650 break;
651 }
652 case LayoutRawBlock::RESERVED: {
653 output->print_cr(" @%d %s %d/-",
654 b->offset(),
655 "RESERVED",
656 b->size());
657 break;
658 }
659 case LayoutRawBlock::INHERITED: {
660 assert(!is_static, "Static fields are not inherited in layouts");
661 assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
662 bool found = false;
663 const InstanceKlass* ik = super;
664 while (!found && ik != nullptr) {
665 for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
666 if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
667 output->print_cr(" @%d %s %d/%d \"%s\" %s",
668 b->offset(),
669 "INHERITED",
670 b->size(),
671 b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
672 fs.name()->as_C_string(),
673 fs.signature()->as_C_string());
674 found = true;
675 break;
676 }
677 }
678 ik = ik->java_super();
679 }
680 break;
681 }
682 case LayoutRawBlock::EMPTY:
683 output->print_cr(" @%d %s %d/1",
684 b->offset(),
685 "EMPTY",
686 b->size());
687 break;
688 case LayoutRawBlock::PADDING:
689 output->print_cr(" @%d %s %d/1",
690 b->offset(),
691 "PADDING",
692 b->size());
693 break;
694 case LayoutRawBlock::NULL_MARKER:
695 {
696 output->print_cr(" @%d %s %d/1 ",
697 b->offset(),
698 "NULL_MARKER",
699 b->size());
700 break;
701 }
702 default:
703 fatal("Unknown block type");
704 }
705 b = b->next_block();
706 }
707 }
708
709 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
710 GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type, bool is_abstract_value,
711 bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
712 _classname(classname),
713 _loader_data(loader_data),
714 _super_klass(super_klass),
715 _constant_pool(constant_pool),
716 _field_info(field_info),
717 _info(info),
718 _inline_layout_info_array(inline_layout_info_array),
719 _root_group(nullptr),
720 _contended_groups(GrowableArray<FieldGroup*>(8)),
721 _static_fields(nullptr),
722 _layout(nullptr),
723 _static_layout(nullptr),
724 _nonstatic_oopmap_count(0),
725 _payload_alignment(-1),
726 _first_field_offset(-1),
727 _null_marker_offset(-1),
728 _payload_size_in_bytes(-1),
729 _non_atomic_layout_size_in_bytes(-1),
730 _non_atomic_layout_alignment(-1),
731 _atomic_layout_size_in_bytes(-1),
732 _nullable_layout_size_in_bytes(-1),
733 _fields_size_sum(0),
734 _declared_non_static_fields_count(0),
735 _has_non_naturally_atomic_fields(false),
736 _is_naturally_atomic(false),
737 _must_be_atomic(must_be_atomic),
738 _has_nonstatic_fields(false),
739 _has_inline_type_fields(false),
740 _is_contended(is_contended),
741 _is_inline_type(is_inline_type),
742 _is_abstract_value(is_abstract_value),
743 _has_flattening_information(is_inline_type),
744 _is_empty_inline_class(false) {}
745
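// Returns the FieldGroup of the named contended group 'g', creating it on first use.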
746 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
747 assert(g > 0, "must only be called for named contended groups");
748 FieldGroup* fg = nullptr;
749 for (int i = 0; i < _contended_groups.length(); i++) {
750 fg = _contended_groups.at(i);
751 if (fg->contended_group() == g) return fg;
752 }
753 fg = new FieldGroup(g);
754 _contended_groups.append(fg);
755 return fg;
756 }
757
758 void FieldLayoutBuilder::prologue() {
759 _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
760 const InstanceKlass* super_klass = _super_klass;
761 _layout->initialize_instance_layout(super_klass);
762 _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
763 if (super_klass != nullptr) {
764 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
765 }
766 _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
767 _static_layout->initialize_static_layout();
768 _static_fields = new FieldGroup();
769 _root_group = new FieldGroup();
770 }
771
772 // Field sorting for regular (non-inline) classes:
773 // - fields are sorted into static and non-static fields
774 // - non-static fields are also sorted according to their contention group
775 // (support of the @Contended annotation)
776 // - @Contended annotation is ignored for static fields
777 // - field flattening decisions are taken in this method
778 void FieldLayoutBuilder::regular_field_sorting() {
779 int idx = 0;
780 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
781 FieldGroup* group = nullptr;
782 FieldInfo fieldinfo = *it;
783 if (fieldinfo.access_flags().is_static()) {
784 group = _static_fields;
785 } else {
786 _has_nonstatic_fields = true;
787 if (fieldinfo.field_flags().is_contended()) {
788 int g = fieldinfo.contended_group();
789 if (g == 0) {
790 group = new FieldGroup(true);
791 _contended_groups.append(group);
792 } else {
793 group = get_or_create_contended_group(g);
794 }
795 } else {
796 group = _root_group;
797 }
798 }
799 assert(group != nullptr, "invariant");
800 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
801 switch(type) {
802 case T_BYTE:
803 case T_CHAR:
804 case T_DOUBLE:
805 case T_FLOAT:
806 case T_INT:
807 case T_LONG:
808 case T_SHORT:
809 case T_BOOLEAN:
810 group->add_primitive_field(idx, type);
811 break;
812 case T_OBJECT:
813 case T_ARRAY:
814 {
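// Select the layout for this field: either keep it as a reference or flatten it
// using one of the flat layout kinds.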
815 LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);
816 if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
817 || (!fieldinfo.field_flags().is_injected()
818 && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
819 && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
820 _has_inline_type_fields = true;
821 _has_flattening_information = true;
822 }
823 if (lk == LayoutKind::REFERENCE) {
824 if (group != _static_fields) _nonstatic_oopmap_count++;
825 group->add_oop_field(idx);
826 } else {
827 _has_flattening_information = true;
828 InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
829 int size, alignment;
830 get_size_and_alignment(vk, lk, &size, &alignment);
831 group->add_flat_field(idx, vk, lk, size, alignment);
832 _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
833 _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
834 _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
835 _field_info->adr_at(idx)->set_layout_kind(lk);
836         // no need to update _must_be_atomic if vk->must_be_atomic() is true because the current class is not an inline class
837 }
838 break;
839 }
840 default:
841       fatal("Unexpected BasicType");
842 }
843 }
844 _root_group->sort_by_size();
845 _static_fields->sort_by_size();
846 if (!_contended_groups.is_empty()) {
847 for (int i = 0; i < _contended_groups.length(); i++) {
848 _contended_groups.at(i)->sort_by_size();
849 }
850 }
851 }
852
853 /* Field sorting for inline classes:
854  * - because inline classes are immutable, the @Contended annotation is ignored
855  *   when computing their layout (with only read operations, there's no false
856  *   sharing issue)
857  * - this method also records the most constraining field alignment; this value
858  *   is then used as the alignment constraint when flattening this inline type
859  *   into another container
860  * - field flattening decisions are made in this method (those decisions are
861  *   currently based only on the size of the fields to be flattened; the size
862  *   of the resulting instance is not considered)
863 */
864 void FieldLayoutBuilder::inline_class_field_sorting() {
865 assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
866 int alignment = -1;
867 int idx = 0;
868 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
869 FieldGroup* group = nullptr;
870 FieldInfo fieldinfo = *it;
871 int field_alignment = 1;
872 if (fieldinfo.access_flags().is_static()) {
873 group = _static_fields;
874 } else {
875 _has_nonstatic_fields = true;
876 _declared_non_static_fields_count++;
877 group = _root_group;
878 }
879 assert(group != nullptr, "invariant");
880 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
881 switch(type) {
882 case T_BYTE:
883 case T_CHAR:
884 case T_DOUBLE:
885 case T_FLOAT:
886 case T_INT:
887 case T_LONG:
888 case T_SHORT:
889 case T_BOOLEAN:
890 if (group != _static_fields) {
891 field_alignment = type2aelembytes(type); // alignment == size for primitive types
892 }
893 group->add_primitive_field(fieldinfo.index(), type);
894 break;
895 case T_OBJECT:
896 case T_ARRAY:
897 {
898 bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
899 LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);
900 if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
901 || (!fieldinfo.field_flags().is_injected()
902 && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
903 && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
904 _has_inline_type_fields = true;
905 _has_flattening_information = true;
906 }
907 if (lk == LayoutKind::REFERENCE) {
908 if (group != _static_fields) {
909 _nonstatic_oopmap_count++;
910 field_alignment = type2aelembytes(type); // alignment == size for oops
911 }
912 group->add_oop_field(idx);
913 } else {
914 _has_flattening_information = true;
915 InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
916 if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
917 int size, alignment;
918 get_size_and_alignment(vk, lk, &size, &alignment);
919 group->add_flat_field(idx, vk, lk, size, alignment);
920 _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
921 _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
922 field_alignment = alignment;
923 _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
924 _field_info->adr_at(idx)->set_layout_kind(lk);
925 }
926 break;
927 }
928 default:
929 fatal("Unexpected BasicType");
930 }
931 if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
932 }
933 _payload_alignment = alignment;
934 assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
935 }
936
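// Inserts a PADDING block of ContendedPaddingWidth bytes at the given slot;
// does nothing when the padding width is zero.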
937 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
938 if (ContendedPaddingWidth > 0) {
939 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
940 _layout->insert(slot, padding);
941 }
942 }
943
944 /* The computation of the regular class layout is an evolution of the previous default layout
945 * (FieldAllocationStyle 1):
946 * - primitive fields (both primitive types and flat inline types) are allocated
947 * first, from the biggest to the smallest
948  * - then oop fields are allocated (to increase the chances of having contiguous oops
949  *   and a simpler oopmap).
950 */
951 void FieldLayoutBuilder::compute_regular_layout() {
952 bool need_tail_padding = false;
953 prologue();
954 regular_field_sorting();
955 if (_is_contended) {
956 _layout->set_start(_layout->last_block());
957 // insertion is currently easy because the current strategy doesn't try to fill holes
958     // in super class layouts, so the _start block is consequently the _last_block
959 insert_contended_padding(_layout->start());
960 need_tail_padding = true;
961 }
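// Allocate the root (non-contended) group: primitive fields from the biggest to
// the smallest, then oop fields.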
962 _layout->add(_root_group->big_primitive_fields());
963 _layout->add(_root_group->small_primitive_fields());
964 _layout->add(_root_group->oop_fields());
965
966 if (!_contended_groups.is_empty()) {
967 for (int i = 0; i < _contended_groups.length(); i++) {
968 FieldGroup* cg = _contended_groups.at(i);
969 LayoutRawBlock* start = _layout->last_block();
970 insert_contended_padding(start);
971 _layout->add(cg->big_primitive_fields());
972 _layout->add(cg->small_primitive_fields(), start);
973 _layout->add(cg->oop_fields(), start);
974 need_tail_padding = true;
975 }
976 }
977
978 if (need_tail_padding) {
979 insert_contended_padding(_layout->last_block());
980 }
981
982   // Warning: InstanceMirrorKlass expects static oops to be allocated first
983 _static_layout->add_contiguously(_static_fields->oop_fields());
984 _static_layout->add(_static_fields->big_primitive_fields());
985 _static_layout->add(_static_fields->small_primitive_fields());
986
987 epilogue();
988 }
989
990 /* The layout computation for inline classes uses a slightly different strategy than the
991  * one used for regular classes. Regular classes have their oop fields allocated at the end
992  * of the layout to improve GC performance. Unfortunately, this strategy
993 * increases the number of empty slots inside an instance. Because the purpose
994 * of inline classes is to be embedded into other containers, it is critical
995 * to keep their size as small as possible. For this reason, the allocation
996 * strategy is:
997  *   - big primitive fields (primitive types and flat inline types at least as
998  *     large as an oop) are allocated first (from the biggest to the smallest)
999 * - then oop fields
1000 * - then small primitive fields (from the biggest to the smallest)
1001 */
1002 void FieldLayoutBuilder::compute_inline_class_layout() {
1003
1004 // Test if the concrete inline class is an empty class (no instance fields)
1005 // and insert a dummy field if needed
1006 if (!_is_abstract_value) {
1007 bool declares_non_static_fields = false;
1008 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
1009 FieldInfo fieldinfo = *it;
1010 if (!fieldinfo.access_flags().is_static()) {
1011 declares_non_static_fields = true;
1012 break;
1013 }
1014 }
1015 if (!declares_non_static_fields) {
1016 bool has_inherited_fields = false;
1017 const InstanceKlass* super = _super_klass;
1018 while(super != nullptr) {
1019 if (super->has_nonstatic_fields()) {
1020 has_inherited_fields = true;
1021 break;
1022 }
1023 super = super->super() == nullptr ? nullptr : InstanceKlass::cast(super->super());
1024 }
1025
1026 if (!has_inherited_fields) {
1027 // Inject ".empty" dummy field
1028 _is_empty_inline_class = true;
1029 FieldInfo::FieldFlags fflags(0);
1030 fflags.update_injected(true);
1031 AccessFlags aflags;
1032 FieldInfo fi(aflags,
1033 (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
1034 (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
1035 0,
1036 fflags);
1037 int idx = _field_info->append(fi);
1038 _field_info->adr_at(idx)->set_index(idx);
1039 }
1040 }
1041 }
1042
1043 prologue();
1044 inline_class_field_sorting();
1045
1046 assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");
1047
1048 if (_layout->super_has_fields() && !_is_abstract_value) { // non-static field layout
1049 if (!_has_nonstatic_fields) {
1050 assert(_is_abstract_value, "Concrete value types have at least one field");
1051 // Nothing to do
1052 } else {
1053 // decide which alignment to use, then set first allowed field offset
1054
1055 assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
1056 assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");
1057
1058 if (_payload_alignment < _layout->super_alignment()) {
1059 int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
1060 assert(new_alignment % _payload_alignment == 0, "Must be");
1061 assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
1062 _payload_alignment = new_alignment;
1063 }
1064 _layout->set_start(_layout->first_field_block());
1065 }
1066 } else {
1067 if (_is_abstract_value && _has_nonstatic_fields) {
1068 _payload_alignment = type2aelembytes(BasicType::T_LONG);
1069 }
1070 assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
1071 LayoutRawBlock* first_empty = _layout->start()->next_block();
1072 if (first_empty->offset() % _payload_alignment != 0) {
1073 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
1074 _layout->insert(first_empty, padding);
1075 if (first_empty->size() == 0) {
1076 _layout->remove(first_empty);
1077 }
1078 _layout->set_start(padding);
1079 }
1080 }
1081
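// Allocate instance fields following the inline class strategy described above:
// big primitive fields first, then oop fields, then small primitive fields.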
1082 _layout->add(_root_group->big_primitive_fields());
1083 _layout->add(_root_group->oop_fields());
1084 _layout->add(_root_group->small_primitive_fields());
1085
1086 LayoutRawBlock* first_field = _layout->first_field_block();
1087 if (first_field != nullptr) {
1088 _first_field_offset = _layout->first_field_block()->offset();
1089 _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1090 } else {
1091 assert(_is_abstract_value, "Concrete inline types must have at least one field");
1092 _first_field_offset = _layout->blocks()->size();
1093 _payload_size_in_bytes = 0;
1094 }
1095
1096 // Determining if the value class is naturally atomic:
1097 if ((!_layout->super_has_fields() && _declared_non_static_fields_count <= 1 && !_has_non_naturally_atomic_fields)
1098 || (_layout->super_has_fields() && _super_klass->is_naturally_atomic() && _declared_non_static_fields_count == 0)) {
1099 _is_naturally_atomic = true;
1100 }
1101
1102 // At this point, the characteristics of the raw layout (used in standalone instances) are known.
1103 // From this, additional layouts will be computed: atomic and nullable layouts
1104 // Once those additional layouts are computed, the raw layout might need some adjustments
1105
1106 if (!_is_abstract_value) { // Flat layouts are only for concrete value classes
1107     // Validation of the non-atomic layout
1108 if ((InlineFieldMaxFlatSize < 0 || _payload_size_in_bytes * BitsPerByte <= InlineFieldMaxFlatSize)
1109 && (!_must_be_atomic || _is_naturally_atomic)) {
1110 _non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
1111 _non_atomic_layout_alignment = _payload_alignment;
1112 }
1113
1114 // Next step is to compute the characteristics for a layout enabling atomic updates
1115 if (AtomicFieldFlattening) {
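// The atomic layout must fit in a single atomic memory operation, so the payload
// size is rounded up to a power of two (e.g. a 5-byte payload yields an 8-byte
// atomic layout).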
1116 int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
1117 if ( atomic_size <= (int)MAX_ATOMIC_OP_SIZE
1118 && (InlineFieldMaxFlatSize < 0 || atomic_size * BitsPerByte <= InlineFieldMaxFlatSize)) {
1119 _atomic_layout_size_in_bytes = atomic_size;
1120 }
1121 }
1122
1123 // Next step is the nullable layout: the layout must include a null marker and must also be atomic
1124 if (NullableFieldFlattening) {
1125       // Look for an empty slot inside the layout that could be used to store a null marker
1126 // FIXME: could it be possible to re-use the .empty field as a null marker for empty values?
1127 LayoutRawBlock* b = _layout->first_field_block();
1128       assert(b != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1129 int null_marker_offset = -1;
1130 if (_is_empty_inline_class) {
1131         // Reusing the dummy field as a null marker
1132 assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
1133 null_marker_offset = b->offset();
1134 } else {
1135 while (b != _layout->last_block()) {
1136 if (b->block_kind() == LayoutRawBlock::EMPTY) {
1137 break;
1138 }
1139 b = b->next_block();
1140 }
1141 if (b != _layout->last_block()) {
1142 // found an empty slot, register its offset from the beginning of the payload
1143 null_marker_offset = b->offset();
1144 LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1145 _layout->add_field_at_offset(marker, b->offset());
1146 }
1147 if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
1148 int last_offset = _layout->last_block()->offset();
1149 LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1150 _layout->insert_field_block(_layout->last_block(), marker);
1151 assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
1152 null_marker_offset = marker->offset();
1153 }
1154 }
1155
1156       // Now that the null marker is there, the size of the nullable layout must be computed (remember, it must be atomic too)
1157 int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1158 int nullable_size = round_up_power_of_2(new_raw_size);
1159 if (nullable_size <= (int)MAX_ATOMIC_OP_SIZE
1160 && (InlineFieldMaxFlatSize < 0 || nullable_size * BitsPerByte <= InlineFieldMaxFlatSize)) {
1161 _nullable_layout_size_in_bytes = nullable_size;
1162 _null_marker_offset = null_marker_offset;
1163 } else {
1164 // If the nullable layout is rejected, the NULL_MARKER block should be removed
1165         // from the layout, otherwise it would still appear if the layout is printed
1166 _layout->remove_null_marker();
1167 _null_marker_offset = -1;
1168 }
1169 }
1170 // If the inline class has an atomic or nullable (which is also atomic) layout,
1171     // we want the raw layout to have the same alignment as those atomic layouts so that access
1172     // code can remain simple (a single instruction without an intermediate copy). This might
1173     // require shifting all fields in the raw layout, but this operation is possible only if the
1174     // class doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
1175     // field shift is needed but not possible, all atomic layouts are disabled and only the
1176     // reference and loosely consistent (non-atomic) layouts are supported.
1177 int required_alignment = _payload_alignment;
1178 if (has_atomic_layout() && _payload_alignment < atomic_layout_size_in_bytes()) {
1179 required_alignment = atomic_layout_size_in_bytes();
1180 }
1181 if (has_nullable_layout() && _payload_alignment < nullable_layout_size_in_bytes()) {
1182 required_alignment = nullable_layout_size_in_bytes();
1183 }
1184 int shift = first_field->offset() % required_alignment;
1185 if (shift != 0) {
1186 if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
1187         assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1188 _layout->shift_fields(shift);
1189 _first_field_offset = _layout->first_field_block()->offset();
1190 if (has_nullable_layout()) {
1191 assert(!_is_empty_inline_class, "Should not get here with empty values");
1192 _null_marker_offset = _layout->find_null_marker()->offset();
1193 }
1194 _payload_alignment = required_alignment;
1195 } else {
1196 _atomic_layout_size_in_bytes = -1;
1197 if (has_nullable_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
1198 _layout->remove_null_marker();
1199 }
1200 _nullable_layout_size_in_bytes = -1;
1201 _null_marker_offset = -1;
1202 }
1203 } else {
1204 _payload_alignment = required_alignment;
1205 }
1206
1207 // If the inline class has a nullable layout, the layout used in heap allocated standalone
1208 // instances must also be the nullable layout, in order to be able to set the null marker to
1209 // non-null before copying the payload to other containers.
1210 if (has_nullable_layout() && payload_layout_size_in_bytes() < nullable_layout_size_in_bytes()) {
1211 _payload_size_in_bytes = nullable_layout_size_in_bytes();
1212 }
1213 }
1214   // Warning: InstanceMirrorKlass expects static oops to be allocated first
1215 _static_layout->add_contiguously(_static_fields->oop_fields());
1216 _static_layout->add(_static_fields->big_primitive_fields());
1217 _static_layout->add(_static_fields->small_primitive_fields());
1218
1219 epilogue();
1220 }
1221
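// Adds the oop map entries of a flattened field's value class to the container's
// oop maps, rebasing each entry from the value class payload to the field's offset
// within the container.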
1222 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1223 InlineKlass* vklass, int offset) {
1224 int diff = offset - vklass->first_field_offset();
1225 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1226 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1227 while (map < last_map) {
1228 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1229 map++;
1230 }
1231 }
1232
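// Walks a list of layout blocks and registers the oop maps of flattened (FLAT)
// fields whose value class contains oops.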
1233 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1234 if (list == nullptr) return;
1235 for (int i = 0; i < list->length(); i++) {
1236 LayoutRawBlock* f = list->at(i);
1237 if (f->block_kind() == LayoutRawBlock::FLAT) {
1238 InlineKlass* vk = f->inline_klass();
1239 assert(vk != nullptr, "Should have been initialized");
1240 if (vk->contains_oops()) {
1241 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1242 }
1243 }
1244 }
1245 }
1246
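// Registers an oop map entry for each oop field of the group, then the oops
// embedded in its flattened fields (which may sit in either primitive list).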
1247 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1248 if (group->oop_fields() != nullptr) {
1249 for (int i = 0; i < group->oop_fields()->length(); i++) {
1250 LayoutRawBlock* b = group->oop_fields()->at(i);
1251 nonstatic_oop_maps->add(b->offset(), 1);
1252 }
1253 }
1254 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1255 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1256 }
1257
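// Final step of the layout computation: builds the non-static oop maps (starting
// from the inherited ones), computes instance and static field sizes, fills in the
// FieldLayoutInfo passed back for InstanceKlass creation, and optionally prints the layout.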
1258 void FieldLayoutBuilder::epilogue() {
1259 // Computing oopmaps
1260 OopMapBlocksBuilder* nonstatic_oop_maps =
1261 new OopMapBlocksBuilder(_nonstatic_oopmap_count);
1262 int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
1263 if (super_oop_map_count > 0) {
1264 nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
1265 _super_klass->nonstatic_oop_map_count());
1266 }
1267 register_embedded_oops(nonstatic_oop_maps, _root_group);
1268 if (!_contended_groups.is_empty()) {
1269 for (int i = 0; i < _contended_groups.length(); i++) {
1270 FieldGroup* cg = _contended_groups.at(i);
1271 if (cg->oop_count() > 0) {
1272 assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
1273 register_embedded_oops(nonstatic_oop_maps, cg);
1274 }
1275 }
1276 }
1277 nonstatic_oop_maps->compact();
1278
1279 int instance_end = align_up(_layout->last_block()->offset(), wordSize);
1280 int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
1281 int static_fields_size = (static_fields_end -
1282 InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
1283 int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
1284
1285 // Pass back information needed for InstanceKlass creation
1286
1287 _info->oop_map_blocks = nonstatic_oop_maps;
1288 _info->_instance_size = align_object_size(instance_end / wordSize);
1289 _info->_static_field_size = static_fields_size;
1290 _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
1291 _info->_has_nonstatic_fields = _has_nonstatic_fields;
1292 _info->_has_inline_fields = _has_inline_type_fields;
1293 _info->_is_naturally_atomic = _is_naturally_atomic;
1294 if (_is_inline_type) {
1295 _info->_must_be_atomic = _must_be_atomic;
1296 _info->_payload_alignment = _payload_alignment;
1297 _info->_first_field_offset = _first_field_offset;
1298 _info->_payload_size_in_bytes = _payload_size_in_bytes;
1299 _info->_non_atomic_size_in_bytes = _non_atomic_layout_size_in_bytes;
1300 _info->_non_atomic_alignment = _non_atomic_layout_alignment;
1301 _info->_atomic_layout_size_in_bytes = _atomic_layout_size_in_bytes;
1302 _info->_nullable_layout_size_in_bytes = _nullable_layout_size_in_bytes;
1303 _info->_null_marker_offset = _null_marker_offset;
1304 _info->_default_value_offset = _static_layout->default_value_offset();
1305 _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
1306 _info->_is_empty_inline_klass = _is_empty_inline_class;
1307 }
1308
1309 // This may be too restrictive, since if all the fields fit in 64
1310 // bits we could make the decision to align instances of this class
1311 // to 64-bit boundaries, and load and store them as single words.
1312   // And on machines that support larger atomics we could similarly
1313 // allow larger values to be atomic, if properly aligned.
1314
1315 #ifdef ASSERT
1316 // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
1317 // which prints the details of LayoutRawBlocks used to compute the layout.
1318 // The code below checks that offsets in the _field_info meta-data match offsets
1319 // in the LayoutRawBlocks
1320 LayoutRawBlock* b = _layout->blocks();
1321 while(b != _layout->last_block()) {
1322 if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1323 if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
1324 tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
1325 }
1326 assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1327 }
1328 b = b->next_block();
1329 }
1330 b = _static_layout->blocks();
1331 while(b != _static_layout->last_block()) {
1332 if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1333 assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1334 }
1335 b = b->next_block();
1336 }
1337 #endif // ASSERT
1338
1339 static bool first_layout_print = true;
1340
1341
1342 if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
1343 ResourceMark rm;
1344 stringStream st;
1345 if (first_layout_print) {
1346 st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
1347 st.print_cr("Heap oop size = %d", heapOopSize);
1348 first_layout_print = false;
1349 }
1350 if (_super_klass != nullptr) {
1351 st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
1352 _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
1353 } else {
1354 st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
1355 }
1356 st.print_cr("Instance fields:");
1357 _layout->print(&st, false, _super_klass, _inline_layout_info_array);
1358 st.print_cr("Static fields:");
1359 _static_layout->print(&st, true, nullptr, _inline_layout_info_array);
1360 st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
1361 if (_is_inline_type) {
1362 st.print_cr("First field offset = %d", _first_field_offset);
1363 st.print_cr("Payload layout: %d/%d", _payload_size_in_bytes, _payload_alignment);
1364 if (has_non_atomic_flat_layout()) {
1365 st.print_cr("Non atomic flat layout: %d/%d", _non_atomic_layout_size_in_bytes, _non_atomic_layout_alignment);
1366 } else {
1367 st.print_cr("Non atomic flat layout: -/-");
1368 }
1369 if (has_atomic_layout()) {
1370 st.print_cr("Atomic flat layout: %d/%d", _atomic_layout_size_in_bytes, _atomic_layout_size_in_bytes);
1371 } else {
1372 st.print_cr("Atomic flat layout: -/-");
1373 }
1374 if (has_nullable_layout()) {
1375 st.print_cr("Nullable flat layout: %d/%d", _nullable_layout_size_in_bytes, _nullable_layout_size_in_bytes);
1376 } else {
1377 st.print_cr("Nullable flat layout: -/-");
1378 }
1379 if (_null_marker_offset != -1) {
1380 st.print_cr("Null marker offset = %d", _null_marker_offset);
1381 }
1382 }
1383 st.print_cr("---");
1384 // Print output all together.
1385 tty->print_raw(st.as_string());
1386 }
1387 }
1388
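// Entry point of the builder: inline types and abstract value classes use the
// inline class strategy, all other classes use the regular strategy.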
1389 void FieldLayoutBuilder::build_layout() {
1390 if (_is_inline_type || _is_abstract_value) {
1391 compute_inline_class_layout();
1392 } else {
1393 compute_regular_layout();
1394 }
1395 }
|