1 /*
   2  * Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/classFileParser.hpp"
  27 #include "classfile/fieldLayoutBuilder.hpp"
  28 #include "classfile/systemDictionary.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "jvm.h"
  31 #include "memory/resourceArea.hpp"
  32 #include "oops/array.hpp"
  33 #include "oops/fieldStreams.inline.hpp"
  34 #include "oops/instanceMirrorKlass.hpp"
  35 #include "oops/instanceKlass.inline.hpp"
  36 #include "oops/klass.inline.hpp"
  37 #include "oops/inlineKlass.inline.hpp"
  38 #include "runtime/fieldDescriptor.inline.hpp"
  39 
  40 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  41   _next_block(nullptr),
  42   _prev_block(nullptr),
  43   _inline_klass(nullptr),
  44   _kind(kind),
  45   _offset(-1),
  46   _alignment(1),
  47   _size(size),
  48   _field_index(-1),
  49   _null_marker_offset(-1),
  50   _is_reference(false),
  51   _needs_null_marker(false) {
  52   assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
  53          "Otherwise, should use the constructor with a field index argument");
  54   assert(size > 0, "Sanity check");
  55 }
  56 
  57 
  58 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  59  _next_block(nullptr),
  60  _prev_block(nullptr),
  61  _inline_klass(nullptr),
  62  _kind(kind),
  63  _offset(-1),
  64  _alignment(alignment),
  65  _size(size),
  66  _field_index(index),
  67  _null_marker_offset(-1),
  68  _is_reference(is_reference),
  69  _needs_null_marker(false) {
  70   assert(kind == REGULAR || kind == FLAT || kind == INHERITED || kind == INHERITED_NULL_MARKER,
         "Other kinds do not have a field index");
  72   assert(size > 0, "Sanity check");
  73   assert(alignment > 0, "Sanity check");
  74 }
  75 
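// Returns true if a field of the given size and alignment can be placed in this
// block: the block must be large enough to hold the field plus the padding
// required to align the field within the block.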
  76 bool LayoutRawBlock::fit(int size, int alignment) {
  77   int adjustment = 0;
  78   if ((_offset % alignment) != 0) {
  79     adjustment = alignment - (_offset % alignment);
  80   }
  81   return _size >= size + adjustment;
  82 }
  83 
  84 FieldGroup::FieldGroup(int contended_group) :
  85   _next(nullptr),
  86   _small_primitive_fields(nullptr),
  87   _big_primitive_fields(nullptr),
  88   _oop_fields(nullptr),
  89   _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  90   _oop_count(0) {}
  91 
  92 void FieldGroup::add_primitive_field(int idx, BasicType type) {
  93   int size = type2aelembytes(type);
  94   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
  95   if (size >= oopSize) {
  96     add_to_big_primitive_list(block);
  97   } else {
  98     add_to_small_primitive_list(block);
  99   }
 100 }
 101 
 102 void FieldGroup::add_oop_field(int idx) {
 103   int size = type2aelembytes(T_OBJECT);
 104   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
 105   if (_oop_fields == nullptr) {
 106     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 107   }
 108   _oop_fields->append(block);
 109   _oop_count++;
 110 }
 111 
 112 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, bool needs_null_marker) {
 113   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, vk->get_payload_size_in_bytes(), vk->get_alignment(), false);
 114   block->set_inline_klass(vk);
 115   if (needs_null_marker) block->set_needs_null_marker();
 116   if (block->size() >= oopSize) {
 117     add_to_big_primitive_list(block);
 118   } else {
 119     add_to_small_primitive_list(block);
 120   }
 121 }
 122 
 123 void FieldGroup::sort_by_size() {
 124   if (_small_primitive_fields != nullptr) {
 125     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 126   }
 127   if (_big_primitive_fields != nullptr) {
 128     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 129   }
 130 }
 131 
 132 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 133   if (_small_primitive_fields == nullptr) {
 134     _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 135   }
 136   _small_primitive_fields->append(block);
 137 }
 138 
 139 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 140   if (_big_primitive_fields == nullptr) {
 141     _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 142   }
 143   _big_primitive_fields->append(block);
 144 }
 145 
 146 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
 147   _field_info(field_info),
 148   _cp(cp),
 149   _blocks(nullptr),
 150   _start(_blocks),
 151   _last(_blocks),
 152   _super_first_field_offset(-1),
 153   _super_alignment(-1),
 154   _super_min_align_required(-1),
 155   _super_has_fields(false),
 156   _has_missing_null_markers(false) {}
 157 
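// Creates the initial layout for static fields: a single EMPTY block, preceded by a
// RESERVED block covering the space before InstanceMirrorKlass::offset_of_static_fields()
// once that offset is known (it can still be zero during bootstrapping).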
 158 void FieldLayout::initialize_static_layout() {
 159   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 160   _blocks->set_offset(0);
 161   _last = _blocks;
 162   _start = _blocks;
 163   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of java.lang.Class is not yet known when the layout
  // of static fields is computed. Field offsets are fixed later when the size is known
 166   // (see java_lang_Class::fixup_mirror())
 167   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 168     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 169     _blocks->set_offset(0);
 170   }
 171 }
 172 
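// Creates the initial layout for instance fields: either a RESERVED block covering the
// object header when there is no super class, or a reconstruction of the super classes'
// layout with its holes filled, from which allocation of the new fields will start.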
 173 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
 174   if (super_klass == nullptr) {
 175     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 176     _blocks->set_offset(0);
 177     _last = _blocks;
 178     _start = _blocks;
 179     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 180   } else {
 181     _super_has_fields = reconstruct_layout(super_klass);
 182     fill_holes(super_klass);
 183     if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !_super_has_fields) {
      _start = _blocks; // Setting _start to _blocks instead of _last allows subclasses
      // to allocate fields in empty slots of their superclasses
 186     } else {
 187       _start = _last;    // append fields at the end of the reconstructed layout
 188     }
 189   }
 190 }
 191 
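// Returns the first block that corresponds to an actual field (INHERITED, REGULAR
// or FLAT), skipping blocks that do not represent fields.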
 192 LayoutRawBlock* FieldLayout::first_field_block() {
 193   LayoutRawBlock* block = _blocks;
 194   while (block != nullptr
 195          && block->kind() != LayoutRawBlock::INHERITED
 196          && block->kind() != LayoutRawBlock::REGULAR
 197          && block->kind() != LayoutRawBlock::FLAT) {
 198     block = block->next_block();
 199   }
 200   return block;
 201 }
 202 
 203 // Insert a set of fields into a layout.
 204 // For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements); if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument.
 208 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 209   if (list == nullptr) return;
 210   if (start == nullptr) start = this->_start;
 211   bool last_search_success = false;
 212   int last_size = 0;
 213   int last_alignment = 0;
 214   for (int i = 0; i < list->length(); i ++) {
 215     LayoutRawBlock* b = list->at(i);
 216     LayoutRawBlock* cursor = nullptr;
 217     LayoutRawBlock* candidate = nullptr;
 218     // if start is the last block, just append the field
 219     if (start == last_block()) {
 220       candidate = last_block();
 221     }
 222     // Before iterating over the layout to find an empty slot fitting the field's requirements,
 223     // check if the previous field had the same requirements and if the search for a fitting slot
 224     // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
 227       candidate = last_block();
 228     } else {
 229       // Iterate over the layout to find an empty slot fitting the field's requirements
 230       last_size = b->size();
 231       last_alignment = b->alignment();
 232       cursor = last_block()->prev_block();
 233       assert(cursor != nullptr, "Sanity check");
 234       last_search_success = true;
 235 
 236       while (cursor != start) {
 237         if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
 238           if (candidate == nullptr || cursor->size() < candidate->size()) {
 239             candidate = cursor;
 240           }
 241         }
 242         cursor = cursor->prev_block();
 243       }
 244       if (candidate == nullptr) {
 245         candidate = last_block();
 246         last_search_success = false;
 247       }
 248       assert(candidate != nullptr, "Candidate must not be null");
 249       assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 250       assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
 251     }
 252     insert_field_block(candidate, b);
 253   }
 254 }
 255 
// Used for classes with hard-coded field offsets: insert a field at the specified offset.
 257 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
 258   assert(block != nullptr, "Sanity check");
 259   block->set_offset(offset);
 260   if (start == nullptr) {
 261     start = this->_start;
 262   }
 263   LayoutRawBlock* slot = start;
 264   while (slot != nullptr) {
 265     if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
 266         slot == _last){
 267       assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
 268       assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
 269       if (slot->offset() < block->offset()) {
 270         int adjustment = block->offset() - slot->offset();
 271         LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 272         insert(slot, adj);
 273       }
 274       insert(slot, block);
 275       if (slot->size() == 0) {
 276         remove(slot);
 277       }
 278       if (block->kind() == LayoutRawBlock::REGULAR || block->kind() == LayoutRawBlock::FLAT) {
 279         _field_info->adr_at(block->field_index())->set_offset(block->offset());
 280       }
 281       return;
 282     }
 283     slot = slot->next_block();
 284   }
 285   fatal("Should have found a matching slot above, corrupted layout or invalid offset");
 286 }
 287 
 288 // The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
 290 // to fit alignment constraints).
 291 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 292   if (list == nullptr) return;
 293   if (start == nullptr) {
 294     start = _start;
 295   }
  // This code assumes that if the first block is well aligned, the following
  // blocks will naturally be well aligned (no need for adjustment)
 298   int size = 0;
 299   for (int i = 0; i < list->length(); i++) {
 300     size += list->at(i)->size();
 301   }
 302 
 303   LayoutRawBlock* candidate = nullptr;
 304   if (start == last_block()) {
 305     candidate = last_block();
 306   } else {
 307     LayoutRawBlock* first = list->at(0);
 308     candidate = last_block()->prev_block();
 309     while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
 310       if (candidate == start) {
 311         candidate = last_block();
 312         break;
 313       }
 314       candidate = candidate->prev_block();
 315     }
 316     assert(candidate != nullptr, "Candidate must not be null");
 317     assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 318     assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
 319   }
 320 
 321   for (int i = 0; i < list->length(); i++) {
 322     LayoutRawBlock* b = list->at(i);
 323     insert_field_block(candidate, b);
 324     assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
 325   }
 326 }
 327 
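// Inserts a field block into an EMPTY slot, adding an EMPTY alignment block first if the
// slot's offset does not satisfy the field's alignment, then records the field's offset
// in the _field_info table (except for NULL_MARKER blocks, whose field index refers to
// another field).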
 328 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 329   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 330   if (slot->offset() % block->alignment() != 0) {
 331     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 332     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 333     insert(slot, adj);
 334   }
  assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
 336   insert(slot, block);
 337   if (block->needs_null_marker()) {
 338     _has_missing_null_markers = true;
 339   }
 340   if (slot->size() == 0) {
 341     remove(slot);
 342   }
 343   // NULL_MARKER blocks have a field index pointing to the field that needs a null marker,
 344   // so the field_info at this index must not be updated with the null marker's offset
 345   if (block->kind() != LayoutRawBlock::NULL_MARKER) {
 346     _field_info->adr_at(block->field_index())->set_offset(block->offset());
 347   }
 348   return block;
 349 }
 350 
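// Rebuilds the layout of the super classes from their FieldInfo streams: non-static
// inherited fields (and inherited null markers) are added as blocks sorted by offset,
// and the first inherited field offset and the minimum alignment required by the super
// classes are recorded. Returns true if at least one non-static field was inherited.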
 351 bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
 352   bool has_instance_fields = false;
 353   if (ik->is_abstract() && !ik->is_identity_class()) {
 354     _super_alignment = type2aelembytes(BasicType::T_LONG);
 355   }
 356   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 357   while (ik != nullptr) {
 358     for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 359       BasicType type = Signature::basic_type(fs.signature());
      // AllFieldStream iterates over both static and non-static fields; skip static fields here
 361       if (fs.access_flags().is_static()) continue;
 362       has_instance_fields = true;
 363       if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) _super_first_field_offset = fs.offset();
 364       LayoutRawBlock* block;
 365       if (fs.is_flat()) {
 366         InlineKlass* vk = InlineKlass::cast(ik->get_inline_type_field_klass(fs.index()));
 367         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, vk->get_payload_size_in_bytes(),
 368                                    vk->get_alignment(), false);
 369         assert(_super_alignment == -1 || _super_alignment >=  vk->get_alignment(), "Invalid value alignment");
 370         _super_min_align_required = _super_min_align_required > vk->get_alignment() ? _super_min_align_required : vk->get_alignment();
 371         if (!fs.field_flags().is_null_free_inline_type()) {
 372           assert(fs.field_flags().has_null_marker(), "Nullable flat fields must have a null marker");
          LayoutRawBlock* marker = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED_NULL_MARKER, 1 /* current NULL_MARKER blocks are one byte */,
 374                                     1, false);
 375           marker->set_offset(fs.null_marker_offset());
 376           all_fields->append(marker);
 377         }
 378       } else {
 379         int size = type2aelembytes(type);
 380         // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 381         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
 382         // For primitive types, the alignment is equal to the size
 383         assert(_super_alignment == -1 || _super_alignment >=  size, "Invalid value alignment");
 384         _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
 385       }
 386       block->set_offset(fs.offset());
 387       all_fields->append(block);
 388     }
 389     ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
 390   }
 391   all_fields->sort(LayoutRawBlock::compare_offset);
 392   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 393   _blocks->set_offset(0);
 394   _last = _blocks;
 395   for(int i = 0; i < all_fields->length(); i++) {
 396     LayoutRawBlock* b = all_fields->at(i);
 397     _last->set_next_block(b);
 398     b->set_prev_block(_last);
 399     _last = b;
 400   }
 401   _start = _blocks;
 402   return has_instance_fields;
 403 }
 404 
 405 // Called during the reconstruction of a layout, after fields from super
 406 // classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods will work.
// This method handles classes with @Contended annotations differently,
// inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
// fields from interfering with contended fields/classes.
 411 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
 412   assert(_blocks != nullptr, "Sanity check");
 413   assert(_blocks->offset() == 0, "first block must be at offset zero");
 414   LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
 415   LayoutRawBlock* b = _blocks;
 416   while (b->next_block() != nullptr) {
 417     if (b->next_block()->offset() > (b->offset() + b->size())) {
 418       int size = b->next_block()->offset() - (b->offset() + b->size());
 419       LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
 420       empty->set_offset(b->offset() + b->size());
 421       empty->set_next_block(b->next_block());
 422       b->next_block()->set_prev_block(empty);
 423       b->set_next_block(empty);
 424       empty->set_prev_block(b);
 425     }
 426     b = b->next_block();
 427   }
 428   assert(b->next_block() == nullptr, "Invariant at this point");
 429   assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
 430   // If the super class has @Contended annotation, a padding block is
 431   // inserted at the end to ensure that fields from the subclasses won't share
 432   // the cache line of the last field of the contended class
 433   if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
 434     LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 435     p->set_offset(b->offset() + b->size());
 436     b->set_next_block(p);
 437     p->set_prev_block(b);
 438     b = p;
 439   }
 440   if (!UseEmptySlotsInSupers) {
    // Add an empty slot to align fields of the subclass on a heapOopSize boundary
 442     // in order to emulate the behavior of the previous algorithm
 443     int align = (b->offset() + b->size()) % heapOopSize;
 444     if (align != 0) {
 445       int sz = heapOopSize - align;
 446       LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
 447       p->set_offset(b->offset() + b->size());
 448       b->set_next_block(p);
 449       p->set_prev_block(b);
 450       b = p;
 451     }
 452   }
 453   LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 454   last->set_offset(b->offset() + b->size());
 455   assert(last->offset() > 0, "Sanity check");
 456   b->set_next_block(last);
 457   last->set_prev_block(b);
 458   _last = last;
 459 }
 460 
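// Inserts a block at the beginning of an EMPTY slot: the block takes the slot's current
// offset and the slot is shrunk by the block's size. The caller is responsible for
// removing the slot if its size becomes zero.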
 461 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
 462   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 463   assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
 464   block->set_offset(slot->offset());
 465   slot->set_offset(slot->offset() + block->size());
 466   assert((slot->size() - block->size()) < slot->size(), "underflow checking");
 467   assert(slot->size() - block->size() >= 0, "no negative size allowed");
 468   slot->set_size(slot->size() - block->size());
 469   block->set_prev_block(slot->prev_block());
 470   block->set_next_block(slot);
 471   slot->set_prev_block(block);
 472   if (block->prev_block() != nullptr) {
 473     block->prev_block()->set_next_block(block);
 474   }
 475   if (_blocks == slot) {
 476     _blocks = block;
 477   }
 478   if (_start == slot) {
 479     _start = block;
 480   }
 481   return block;
 482 }
 483 
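// Unlinks a block from the layout. The trailing EMPTY block (_last) must never be removed.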
 484 void FieldLayout::remove(LayoutRawBlock* block) {
 485   assert(block != nullptr, "Sanity check");
 486   assert(block != _last, "Sanity check");
 487   if (_blocks == block) {
 488     _blocks = block->next_block();
 489     if (_blocks != nullptr) {
 490       _blocks->set_prev_block(nullptr);
 491     }
 492   } else {
 493     assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
 494     block->prev_block()->set_next_block(block->next_block());
 495     block->next_block()->set_prev_block(block->prev_block());
 496   }
 497   if (block == _start) {
 498     _start = block->prev_block();
 499   }
 500 }
 501 
 502 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineKlass*>* inline_fields) {
 503   ResourceMark rm;
 504   LayoutRawBlock* b = _blocks;
 505   while(b != _last) {
 506     switch(b->kind()) {
 507       case LayoutRawBlock::REGULAR: {
 508         FieldInfo* fi = _field_info->adr_at(b->field_index());
 509         output->print_cr(" @%d %s %d/%d \"%s\" %s",
 510                          b->offset(),
 511                          "REGULAR",
 512                          b->size(),
 513                          b->alignment(),
 514                          fi->name(_cp)->as_C_string(),
 515                          fi->signature(_cp)->as_C_string());
 516         break;
 517       }
 518       case LayoutRawBlock::FLAT: {
 519         FieldInfo* fi = _field_info->adr_at(b->field_index());
 520         InlineKlass* ik = inline_fields->at(fi->index());
 521         assert(ik != nullptr, "");
 522         output->print(" @%d %s %d/%d \"%s\" %s %s@%p",
 523                          b->offset(),
 524                          "FLAT",
 525                          b->size(),
 526                          b->alignment(),
 527                          fi->name(_cp)->as_C_string(),
 528                          fi->signature(_cp)->as_C_string(),
 529                          ik->name()->as_C_string(),
 530                          ik->class_loader_data());
 531         if (fi->field_flags().has_null_marker()) {
 532           output->print_cr(" null marker offset %d %s", fi->null_marker_offset(),
 533                            fi->field_flags().is_null_marker_internal() ? "(internal)" : "");
 534         } else {
 535           output->print_cr("");
 536         }
 537         break;
 538       }
 539       case LayoutRawBlock::RESERVED: {
 540         output->print_cr(" @%d %s %d/-",
 541                          b->offset(),
 542                          "RESERVED",
 543                          b->size());
 544         break;
 545       }
 546       case LayoutRawBlock::INHERITED: {
 547         assert(!is_static, "Static fields are not inherited in layouts");
 548         assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
 549         bool found = false;
 550         const InstanceKlass* ik = super;
 551         while (!found && ik != nullptr) {
 552           for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 553             if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
 554               output->print_cr(" @%d %s %d/%d \"%s\" %s",
 555                   b->offset(),
 556                   "INHERITED",
 557                   b->size(),
 558                   b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
 559                   fs.name()->as_C_string(),
 560                   fs.signature()->as_C_string());
 561               found = true;
 562               break;
 563             }
 564         }
 565         ik = ik->java_super();
 566       }
 567       break;
 568     }
 569     case LayoutRawBlock::INHERITED_NULL_MARKER :
 570       output->print_cr(" @%d %s %d/1",
 571                        b->offset(),
 572                       "INHERITED_NULL_MARKER",
 573                        b->size());
 574       break;
 575     case LayoutRawBlock::EMPTY:
 576       output->print_cr(" @%d %s %d/1",
 577                        b->offset(),
 578                       "EMPTY",
 579                        b->size());
 580       break;
 581     case LayoutRawBlock::PADDING:
 582       output->print_cr(" @%d %s %d/1",
 583                       b->offset(),
 584                       "PADDING",
 585                       b->size());
 586       break;
 587     case LayoutRawBlock::NULL_MARKER:
 588     {
 589       FieldInfo* fi = _field_info->adr_at(b->field_index());
 590       output->print_cr(" @%d %s %d/1 null marker for field at offset %d",
 591                       b->offset(),
 592                       "NULL_MARKER",
 593                       b->size(),
 594                       fi->offset());
 595       break;
 596     }
 597     default:
 598       fatal("Unknown block type");
 599     }
 600     b = b->next_block();
 601   }
 602 }
 603 
 604 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type, bool is_abstract_value,
 606                                        FieldLayoutInfo* info, Array<InlineKlass*>* inline_type_field_klasses) :
 607   _classname(classname),
 608   _loader_data(loader_data),
 609   _super_klass(super_klass),
 610   _constant_pool(constant_pool),
 611   _field_info(field_info),
 612   _info(info),
 613   _inline_type_field_klasses(inline_type_field_klasses),
 614   _root_group(nullptr),
 615   _contended_groups(GrowableArray<FieldGroup*>(8)),
 616   _static_fields(nullptr),
 617   _layout(nullptr),
 618   _static_layout(nullptr),
 619   _nonstatic_oopmap_count(0),
 620   _alignment(-1),
 621   _first_field_offset(-1),
 622   _internal_null_marker_offset(-1),
 623   _payload_size_in_bytes(-1),
 624   _atomic_field_count(0),
 625   _fields_size_sum(0),
 626   _has_nonstatic_fields(false),
 627   _has_inline_type_fields(false),
 628   _is_contended(is_contended),
 629   _is_inline_type(is_inline_type),
 630   _is_abstract_value(is_abstract_value),
 631   _has_flattening_information(is_inline_type),
 632   _has_nonatomic_values(false),
 633   _nullable_atomic_flat_candidate(false),
 634   _has_null_markers(false) {}
 635 
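// Returns the FieldGroup associated with the named contended group g, creating it
// if it does not exist yet.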
 636 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 637   assert(g > 0, "must only be called for named contended groups");
 638   FieldGroup* fg = nullptr;
 639   for (int i = 0; i < _contended_groups.length(); i++) {
 640     fg = _contended_groups.at(i);
 641     if (fg->contended_group() == g) return fg;
 642   }
 643   fg = new FieldGroup(g);
 644   _contended_groups.append(fg);
 645   return fg;
 646 }
 647 
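// Creates the instance and static layouts and the field groups used by the sorting phase.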
 648 void FieldLayoutBuilder::prologue() {
 649   _layout = new FieldLayout(_field_info, _constant_pool);
 650   const InstanceKlass* super_klass = _super_klass;
 651   _layout->initialize_instance_layout(super_klass);
 652   if (super_klass != nullptr) {
 653     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 654   }
 655   _static_layout = new FieldLayout(_field_info, _constant_pool);
 656   _static_layout->initialize_static_layout();
 657   _static_fields = new FieldGroup();
 658   _root_group = new FieldGroup();
 659 }
 660 
 661 // Field sorting for regular (non-inline) classes:
//   - fields are sorted into static and non-static fields
 663 //   - non-static fields are also sorted according to their contention group
 664 //     (support of the @Contended annotation)
 665 //   - @Contended annotation is ignored for static fields
 666 //   - field flattening decisions are taken in this method
 667 void FieldLayoutBuilder::regular_field_sorting() {
 668   int idx = 0;
 669   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
 670     FieldGroup* group = nullptr;
 671     FieldInfo fieldinfo = *it;
 672     if (fieldinfo.access_flags().is_static()) {
 673       group = _static_fields;
 674     } else {
 675       _has_nonstatic_fields = true;
 676       _atomic_field_count++;  // we might decrement this
 677       if (fieldinfo.field_flags().is_contended()) {
 678         int g = fieldinfo.contended_group();
 679         if (g == 0) {
 680           group = new FieldGroup(true);
 681           _contended_groups.append(group);
 682         } else {
 683           group = get_or_create_contended_group(g);
 684         }
 685       } else {
 686         group = _root_group;
 687       }
 688     }
 689     assert(group != nullptr, "invariant");
 690     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 691     switch(type) {
 692     case T_BYTE:
 693     case T_CHAR:
 694     case T_DOUBLE:
 695     case T_FLOAT:
 696     case T_INT:
 697     case T_LONG:
 698     case T_SHORT:
 699     case T_BOOLEAN:
 700       group->add_primitive_field(idx, type);
 701       break;
 702     case T_OBJECT:
 703     case T_ARRAY:
 704     {
 705       bool field_is_known_value_class =  !fieldinfo.field_flags().is_injected() && _inline_type_field_klasses != nullptr && _inline_type_field_klasses->at(fieldinfo.index()) != nullptr;
 706       bool value_has_oops = field_is_known_value_class ? _inline_type_field_klasses->at(fieldinfo.index())->nonstatic_oop_count() > 0 : true;
 707       bool is_candidate_for_flattening = fieldinfo.field_flags().is_null_free_inline_type() || (EnableNullableFieldFlattening && field_is_known_value_class && !value_has_oops);
 708       // if (!fieldinfo.field_flags().is_null_free_inline_type()) {
 709       if (!is_candidate_for_flattening) {
 710         if (group != _static_fields) _nonstatic_oopmap_count++;
 711         group->add_oop_field(idx);
 712       } else {
 713         assert(type != T_ARRAY, "null free ptr to array not supported");
 714         _has_inline_type_fields = true;
 715         if (group == _static_fields) {
 716           // static fields are never flat
 717           group->add_oop_field(idx);
 718         } else {
          // The check below is performed for non-static fields; it should be performed for static fields too,
          // but at this stage it is not guaranteed that the klass of the static field has been loaded,
          // so the check for static fields is delayed until the linking phase.
 722           Klass* klass =  _inline_type_field_klasses->at(idx);
 723           assert(klass != nullptr, "Sanity check");
 724           InlineKlass* vk = InlineKlass::cast(klass);
 725           assert(!fieldinfo.field_flags().is_null_free_inline_type() || vk->is_implicitly_constructible(), "must be, should have been checked in post_process_parsed_stream()");
 726           _has_flattening_information = true;
 727           // Flattening decision to be taken here
          // This code assumes all verifications have already been performed
 729           // (field's type has been loaded and it is an inline klass)
 730           bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
 731                                     (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
 732           bool too_atomic_to_flatten = vk->must_be_atomic() || AlwaysAtomicAccesses;
 733           bool too_volatile_to_flatten = fieldinfo.access_flags().is_volatile();
 734           if (vk->is_naturally_atomic()) {
 735             too_atomic_to_flatten = false;
 736             //too_volatile_to_flatten = false; //FIXME
            // Currently, volatile fields are never flat; this could change in the future
 738           }
 739           if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
 740             group->add_flat_field(idx, vk, !fieldinfo.field_flags().is_null_free_inline_type());
 741             _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 742             _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
 743             if (!vk->is_atomic()) {  // flat and non-atomic: take note
 744               _has_nonatomic_values = true;
 745               _atomic_field_count--;  // every other field is atomic but this one
 746             }
 747             if (!fieldinfo.field_flags().is_null_free_inline_type()) _has_null_markers = true;
 748           } else {
 749             _nonstatic_oopmap_count++;
 750             group->add_oop_field(idx);
 751           }
 752         }
 753       }
 754       break;
 755     }
 756     default:
      fatal("Unexpected BasicType");
 758     }
 759   }
 760   _root_group->sort_by_size();
 761   _static_fields->sort_by_size();
 762   if (!_contended_groups.is_empty()) {
 763     for (int i = 0; i < _contended_groups.length(); i++) {
 764       _contended_groups.at(i)->sort_by_size();
 765     }
 766   }
 767 }
 768 
 769 /* Field sorting for inline classes:
 *   - because inline classes are immutable, the @Contended annotation is ignored
 *     when computing their layout (with only read operations, there's no false
 *     sharing issue)
 *   - this method also records the alignment of the field with the most
 *     constraining alignment; this value is then used as the alignment
 *     constraint when flattening this inline type into another container
 *   - field flattening decisions are taken in this method (those decisions are
 *     currently based only on the size of the fields to be flattened; the size
 *     of the resulting instance is not considered)
 779  */
 780 void FieldLayoutBuilder::inline_class_field_sorting() {
 781   assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
 782   int alignment = -1;
 783   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
 784     FieldGroup* group = nullptr;
 785     FieldInfo fieldinfo = *it;
 786     int field_alignment = 1;
 787     if (fieldinfo.access_flags().is_static()) {
 788       group = _static_fields;
 789     } else {
 790       _has_nonstatic_fields = true;
 791       _atomic_field_count++;  // we might decrement this
 792       group = _root_group;
 793     }
 794     assert(group != nullptr, "invariant");
 795     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 796     switch(type) {
 797     case T_BYTE:
 798     case T_CHAR:
 799     case T_DOUBLE:
 800     case T_FLOAT:
 801     case T_INT:
 802     case T_LONG:
 803     case T_SHORT:
 804     case T_BOOLEAN:
 805       if (group != _static_fields) {
 806         field_alignment = type2aelembytes(type); // alignment == size for primitive types
 807       }
 808       group->add_primitive_field(fieldinfo.index(), type);
 809       break;
 810     case T_OBJECT:
 811     case T_ARRAY:
 812     {
 813       bool field_is_known_value_class =  !fieldinfo.field_flags().is_injected() && _inline_type_field_klasses != nullptr && _inline_type_field_klasses->at(fieldinfo.index()) != nullptr;
 814       bool value_has_oops = field_is_known_value_class ? _inline_type_field_klasses->at(fieldinfo.index())->nonstatic_oop_count() > 0 : true;
 815       bool is_candidate_for_flattening = fieldinfo.field_flags().is_null_free_inline_type() || (EnableNullableFieldFlattening && field_is_known_value_class && !value_has_oops);
 816       // if (!fieldinfo.field_flags().is_null_free_inline_type()) {
 817       if (!is_candidate_for_flattening) {
 818         if (group != _static_fields) {
 819           _nonstatic_oopmap_count++;
 820           field_alignment = type2aelembytes(type); // alignment == size for oops
 821         }
 822         group->add_oop_field(fieldinfo.index());
 823       } else {
 824         assert(type != T_ARRAY, "null free ptr to array not supported");
 825         _has_inline_type_fields = true;
 826         if (group == _static_fields) {
 827           // static fields are never flat
 828           group->add_oop_field(fieldinfo.index());
 829         } else {
          // The check below is performed for non-static fields; it should be performed for static fields too,
          // but at this stage it is not guaranteed that the klass of the static field has been loaded,
          // so the check for static fields is delayed until the linking phase.
 833           Klass* klass =  _inline_type_field_klasses->at(fieldinfo.index());
 834           assert(klass != nullptr, "Sanity check");
 835           InlineKlass* vk = InlineKlass::cast(klass);
 836           assert(vk->is_implicitly_constructible(), "must be, should have been checked in post_process_parsed_stream()");
 837           // Flattening decision to be taken here
 838           // This code assumes all verifications have already been performed
 839           // (field's type has been loaded and it is an inline klass)
 840           bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
 841                                     (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
 842           bool too_atomic_to_flatten = vk->must_be_atomic() || AlwaysAtomicAccesses;
 843           bool too_volatile_to_flatten = fieldinfo.access_flags().is_volatile();
 844           if (vk->is_naturally_atomic()) {
 845             too_atomic_to_flatten = false;
 846             //too_volatile_to_flatten = false; //FIXME
            // Currently, volatile fields are never flat; this could change in the future
 848           }
 849           if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
 850             group->add_flat_field(fieldinfo.index(), vk, !fieldinfo.field_flags().is_null_free_inline_type());
 851             _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 852             field_alignment = vk->get_alignment();
 853             _field_info->adr_at(fieldinfo.index())->field_flags_addr()->update_flat(true);
 854             if (!vk->is_atomic()) {  // flat and non-atomic: take note
 855               _has_nonatomic_values = true;
 856               _atomic_field_count--;  // every other field is atomic but this one
 857             }
 858             if (!fieldinfo.field_flags().is_null_free_inline_type()) _has_null_markers = true;
 859           } else {
 860             _nonstatic_oopmap_count++;
 861             field_alignment = type2aelembytes(T_OBJECT);
 862             group->add_oop_field(fieldinfo.index());
 863           }
 864         }
 865       }
 866       break;
 867     }
 868     default:
 869       fatal("Unexpected BasicType");
 870     }
 871     if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
 872   }
 873   _alignment = alignment;
 874   assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
 875 }
 876 
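// Inserts a PADDING block of ContendedPaddingWidth bytes at the given slot's offset.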
 877 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 878   if (ContendedPaddingWidth > 0) {
 879     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 880     _layout->insert(slot, padding);
 881   }
 882 }
 883 
/* Computation of the layout of regular classes is an evolution of the previous
 * default layout (FieldAllocationStyle 1):
 *   - primitive fields (both primitive types and flat inline types) are allocated
 *     first, from the biggest to the smallest
 *   - then oop fields are allocated (to increase the chance of having contiguous
 *     oops and a simpler oop map).
 890  */
 891 void FieldLayoutBuilder::compute_regular_layout() {
 892   bool need_tail_padding = false;
 893   prologue();
 894   regular_field_sorting();
 895   if (_is_contended) {
 896     _layout->set_start(_layout->last_block());
    // Insertion is currently easy because the current strategy doesn't try to fill holes
    // in superclass layouts: the _start block is consequently the last block of the layout.
 899     insert_contended_padding(_layout->start());
 900     need_tail_padding = true;
 901   }
 902   _layout->add(_root_group->big_primitive_fields());
 903   _layout->add(_root_group->small_primitive_fields());
 904   _layout->add(_root_group->oop_fields());
 905 
 906   if (!_contended_groups.is_empty()) {
 907     for (int i = 0; i < _contended_groups.length(); i++) {
 908       FieldGroup* cg = _contended_groups.at(i);
 909       LayoutRawBlock* start = _layout->last_block();
 910       insert_contended_padding(start);
 911       _layout->add(cg->big_primitive_fields());
 912       _layout->add(cg->small_primitive_fields(), start);
 913       _layout->add(cg->oop_fields(), start);
 914       need_tail_padding = true;
 915     }
 916   }
 917 
 918   if (need_tail_padding) {
 919     insert_contended_padding(_layout->last_block());
 920   }
 921 
 922   if (EnableNullableFieldFlattening && _layout->has_missing_null_markers()) {
 923     insert_null_markers();
 924   }
 925 
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
 927   _static_layout->add_contiguously(_static_fields->oop_fields());
 928   _static_layout->add(_static_fields->big_primitive_fields());
 929   _static_layout->add(_static_fields->small_primitive_fields());
 930 
 931   epilogue();
 932 }
 933 
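// Allocates the null markers of nullable flat fields. When the field's InlineKlass provides
// an internal null marker slot, that slot is reused; otherwise a one-byte NULL_MARKER block
// is allocated in the container. In both cases the resulting offset is recorded in the
// field's FieldInfo.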
 934 void FieldLayoutBuilder::insert_null_markers() {
 935   if (!EnableNullableFieldFlattening || !_layout->has_missing_null_markers()) return;
 936   GrowableArray<LayoutRawBlock*>* list = new GrowableArray<LayoutRawBlock*>(10);
 937   for (LayoutRawBlock* block = _layout->first_field_block(); block != _layout->last_block(); block = block->next_block()) {
 938     if (block->needs_null_marker()) {
 939       assert(block->kind() == LayoutRawBlock::FLAT, "Only flat fields might need null markers");
 940       if (block->inline_klass()->has_internal_null_marker_offset()) {
        // The inline klass has an internal null marker slot, let's use it.
        // The inline klass stores its internal null marker offset relative to the beginning of the object;
        // compute the offset relative to the beginning of the payload.
 944         int internal_null_marker_offset = block->inline_klass()->get_internal_null_marker_offset() - block->inline_klass()->first_field_offset();
 945         block->set_null_marker_offset(block->offset() + internal_null_marker_offset);
 946         _field_info->adr_at(block->field_index())->set_null_marker_offset(block->null_marker_offset());
 947         _field_info->adr_at(block->field_index())->field_flags_addr()->update_null_marker(true);
 948         _field_info->adr_at(block->field_index())->field_flags_addr()->update_internal_null_marker(true);
 949       } else {
        // No internal null marker, need an external slot in the container
 951         LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
 952         marker->set_field_index(block->field_index());
 953         list->append(marker);
 954       }
 955     }
 956   }
 957   _layout->add(list);
 958   for (GrowableArrayIterator<LayoutRawBlock*> it = list->begin(); it != list->end(); ++it) {
 959     LayoutRawBlock* block = *it;
 960     assert(block->offset() != -1, "Must be set");
 961     assert(!block->needs_null_marker(), "Must have been set");
 962     _field_info->adr_at(block->field_index())->set_null_marker_offset(block->offset());
 963     _field_info->adr_at(block->field_index())->field_flags_addr()->update_null_marker(true);
 964   }
 965 }
 966 
/* Computation of the layout of inline classes uses a slightly different strategy
 * than for regular classes. Regular classes have their oop fields allocated at the
 * end of the layout to improve GC performance. Unfortunately, this strategy
 * increases the number of empty slots inside an instance. Because the purpose
 * of inline classes is to be embedded into other containers, it is critical
 * to keep their size as small as possible. For this reason, the allocation
 * strategy is:
 *   - big primitive fields (primitive types and flat inline types at least as
 *     large as an oop) are allocated first (from the biggest to the smallest)
 *   - then oop fields
 *   - then small primitive fields (from the biggest to the smallest)
 */
 979 void FieldLayoutBuilder::compute_inline_class_layout() {
 980   prologue();
 981   inline_class_field_sorting();
 982 
 983   assert(_layout->start()->kind() == LayoutRawBlock::RESERVED, "Unexpected");
 984 
 985   if (_layout->super_has_fields() && !_is_abstract_value) {  // non-static field layout
 986     if (!_has_nonstatic_fields) {
 987       assert(_is_abstract_value, "Concrete value types have at least one field");
 988       // Nothing to do
 989     } else {
 990       // decide which alignment to use, then set first allowed field offset
 991 
 992       assert(_layout->super_alignment() >= _alignment, "Incompatible alignment");
 993       assert(_layout->super_alignment() % _alignment == 0, "Incompatible alignment");
 994 
 995       if (_alignment < _layout->super_alignment()) {
 996         int new_alignment = _alignment > _layout->super_min_align_required() ? _alignment : _layout->super_min_align_required();
 997         assert(new_alignment % _alignment == 0, "Must be");
 998         assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
 999         _alignment = new_alignment;
1000       }
1001       if (_layout->first_empty_block()->offset() < _layout->first_field_block()->offset()) {
1002         LayoutRawBlock* first_empty = _layout->start()->next_block();
1003         if (first_empty->offset() % _alignment != 0) {
1004           int size =  _alignment - (first_empty->offset() % _alignment);
1005           LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, size);
1006           _layout->insert(first_empty, padding);
1007           _layout->set_start(padding);
1008         } else {
1009           _layout->set_start( _layout->start());
1010         }
1011       } else {
1012         _layout->set_start(_layout->first_field_block());
1013       }
1014     }
1015   } else {
1016     if (_is_abstract_value && _has_nonstatic_fields) {
1017       _alignment = type2aelembytes(BasicType::T_LONG);
1018     }
1019     assert(_layout->start()->next_block()->kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
1020     LayoutRawBlock* first_empty = _layout->start()->next_block();
1021     if (first_empty->offset() % _alignment != 0) {
1022       LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _alignment - (first_empty->offset() % _alignment));
1023       _layout->insert(first_empty, padding);
1024       if (first_empty->size() == 0) {
1025         _layout->remove(first_empty);
1026       }
1027       _layout->set_start(padding);
1028     }
1029   }
1030 
1031   _layout->add(_root_group->big_primitive_fields());
1032   _layout->add(_root_group->oop_fields());
1033   _layout->add(_root_group->small_primitive_fields());
1034 
1035   if (EnableNullableFieldFlattening && _layout->has_missing_null_markers()) {
1036     insert_null_markers();
1037   }
1038 
1039   LayoutRawBlock* first_field = _layout->first_field_block();
1040   if (first_field != nullptr) {
1041     _first_field_offset = _layout->first_field_block()->offset();
1042     _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1043   } else {
1044     // special case for empty value types
1045     _first_field_offset = _layout->blocks()->size();
1046     _payload_size_in_bytes = 0;
1047   }
1048   if (_layout->first_field_block() == nullptr) {
1049     assert(_is_abstract_value, "Concrete inline types must have at least one field");
1050     _payload_size_in_bytes = 0;
1051   } else {
1052     _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1053   }
1054 
  // Look for an empty slot inside the layout that could be used to store a null marker
1056   LayoutRawBlock* b = _layout->first_field_block();
1057   if (b != nullptr) {
1058     while (b != _layout->last_block()) {
1059       if (b->kind() == LayoutRawBlock::EMPTY) {
1060         break;
1061       }
1062       b = b->next_block();
1063     }
1064     if (b != _layout->last_block()) {
1065       // found an empty slot, register its offset from the beginning of the payload
1066       _internal_null_marker_offset = b->offset();
1067     }
1068   } else {
1069     assert(_is_abstract_value, "Only abstract value can have no fields");
1070   }
1071 
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
1073   _static_layout->add_contiguously(_static_fields->oop_fields());
1074   _static_layout->add(_static_fields->big_primitive_fields());
1075   _static_layout->add(_static_fields->small_primitive_fields());
1076 
1077   epilogue();
1078 }
1079 
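// Adds the oop maps of a flat field's InlineKlass to the container's oop maps, shifted
// by the flat field's offset within the container.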
1080 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1081                 InlineKlass* vklass, int offset) {
1082   int diff = offset - vklass->first_field_offset();
1083   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1084   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1085   while (map < last_map) {
1086     nonstatic_oop_maps->add(map->offset() + diff, map->count());
1087     map++;
1088   }
1089 }
1090 
1091 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1092   if (list != nullptr) {
1093     for (int i = 0; i < list->length(); i++) {
1094       LayoutRawBlock* f = list->at(i);
1095       if (f->kind() == LayoutRawBlock::FLAT) {
1096         InlineKlass* vk = f->inline_klass();
1097         assert(vk != nullptr, "Should have been initialized");
1098         if (vk->contains_oops()) {
1099           add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1100         }
1101       }
1102     }
1103   }
1104 }
1105 
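// Registers in the oop maps both the oop fields of a group and the oops embedded in its
// flat fields.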
1106 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1107   if (group->oop_fields() != nullptr) {
1108     for (int i = 0; i < group->oop_fields()->length(); i++) {
1109       LayoutRawBlock* b = group->oop_fields()->at(i);
1110       nonstatic_oop_maps->add(b->offset(), 1);
1111     }
1112   }
1113   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1114   register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1115 }
1116 
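// Finalizes the layout: builds the non-static oop maps, computes the instance size and
// static field size, fills in the FieldLayoutInfo returned to the InstanceKlass creation
// code, and optionally prints the computed layouts.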
1117 void FieldLayoutBuilder::epilogue() {
1118   // Computing oopmaps
1119   int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
1120   int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
1121   OopMapBlocksBuilder* nonstatic_oop_maps =
1122       new OopMapBlocksBuilder(max_oop_map_count);
1123   if (super_oop_map_count > 0) {
1124     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
1125     _super_klass->nonstatic_oop_map_count());
1126   }
1127   register_embedded_oops(nonstatic_oop_maps, _root_group);
1128   if (!_contended_groups.is_empty()) {
1129     for (int i = 0; i < _contended_groups.length(); i++) {
1130       FieldGroup* cg = _contended_groups.at(i);
1131       if (cg->oop_count() > 0) {
1132         assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
1133         register_embedded_oops(nonstatic_oop_maps, cg);
1134       }
1135     }
1136   }
1137   nonstatic_oop_maps->compact();
1138 
1139   int instance_end = align_up(_layout->last_block()->offset(), wordSize);
1140   int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
1141   int static_fields_size = (static_fields_end -
1142       InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
1143   int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
1144 
1145   // Pass back information needed for InstanceKlass creation
1146 
1147   _info->oop_map_blocks = nonstatic_oop_maps;
1148   _info->_instance_size = align_object_size(instance_end / wordSize);
1149   _info->_static_field_size = static_fields_size;
1150   _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
1151   _info->_has_nonstatic_fields = _has_nonstatic_fields;
1152   _info->_has_inline_fields = _has_inline_type_fields;
1153   _info->_has_null_marker_offsets = _has_null_markers;
1154 
1155   // An inline type is naturally atomic if it has just one field, and
1156   // that field is simple enough.
1157   _info->_is_naturally_atomic = (_is_inline_type &&
1158                                  (_atomic_field_count <= 1) &&
1159                                  !_has_nonatomic_values &&
1160                                  _contended_groups.is_empty());
1161   // This may be too restrictive, since if all the fields fit in 64
1162   // bits we could make the decision to align instances of this class
1163   // to 64-bit boundaries, and load and store them as single words.
1164   // And on machines which supported larger atomics we could similarly
1165   // allow larger values to be atomic, if properly aligned.
1166 
1167 #ifdef ASSERT
  // Tests verifying the integrity of field layouts use the output of -XX:+PrintFieldLayout,
  // which prints the details of the LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info metadata match offsets
  // in the LayoutRawBlocks.
1172   LayoutRawBlock* b = _layout->blocks();
1173   while(b != _layout->last_block()) {
1174     if (b->kind() == LayoutRawBlock::REGULAR || b->kind() == LayoutRawBlock::FLAT) {
1175       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1176     }
1177     b = b->next_block();
1178   }
1179   b = _static_layout->blocks();
1180   while(b != _static_layout->last_block()) {
1181     if (b->kind() == LayoutRawBlock::REGULAR || b->kind() == LayoutRawBlock::FLAT) {
1182       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1183     }
1184     b = b->next_block();
1185   }
1186 #endif // ASSERT
1187 
1188   static bool first_layout_print = true;
1189 
1190 
1191   if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
1192     ResourceMark rm;
1193     stringStream st;
1194     if (first_layout_print) {
1195       st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
1196       st.print_cr("Heap oop size = %d", heapOopSize);
1197       first_layout_print = false;
1198     }
1199     if (_super_klass != nullptr) {
1200       st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
1201                     _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
1202     } else {
1203       st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
1204     }
1205     st.print_cr("Instance fields:");
1206     _layout->print(&st, false, _super_klass, _inline_type_field_klasses);
1207     st.print_cr("Static fields:");
1208     _static_layout->print(&st, true, nullptr, _inline_type_field_klasses);
1209     st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
1210     if (_is_inline_type) {
1211       st.print_cr("First field offset = %d", _first_field_offset);
1212       st.print_cr("Alignment = %d bytes", _alignment);
1213       st.print_cr("Exact size = %d bytes", _payload_size_in_bytes);
1214       if (_internal_null_marker_offset != -1) {
1215         st.print_cr("Null marker offset = %d", _internal_null_marker_offset);
1216       }
1217     }
1218     st.print_cr("---");
1219     // Print output all together.
1220     tty->print_raw(st.as_string());
1221   }
1222 }
1223 
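// Entry point: selects the layout algorithm according to the kind of class being built.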
1224 void FieldLayoutBuilder::build_layout() {
1225   if (_is_inline_type || _is_abstract_value) {
1226     compute_inline_class_layout();
1227   } else {
1228     compute_regular_layout();
1229   }
1230 }