1 /*
   2  * Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "jvm.h"
  30 #include "memory/resourceArea.hpp"
  31 #include "oops/array.hpp"
  32 #include "oops/fieldStreams.inline.hpp"
  33 #include "oops/inlineKlass.inline.hpp"
  34 #include "oops/instanceKlass.inline.hpp"
  35 #include "oops/instanceMirrorKlass.hpp"
  36 #include "oops/klass.inline.hpp"
  37 #include "runtime/fieldDescriptor.inline.hpp"
  38 #include "utilities/align.hpp"
  39 #include "utilities/powerOfTwo.hpp"
  40 
// Selects the layout kind (flat or plain reference) used to store the field
// described by field_info. Returns LayoutKind::REFERENCE whenever flattening
// is disabled, not applicable, or unsafe; otherwise picks the most suitable
// flat layout among those supported by the field's value class.
// inline_layout_info_array maps field indexes to the InlineKlass of fields
// with a known value class type (a nullptr klass means "not a known value class").
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool can_use_atomic_flat) {

  // The can_use_atomic_flat argument indicates if an atomic flat layout can be used for this field.
  // This argument will be false if the container is a loosely consistent value class. Using an atomic layout
  // in a container that has no atomicity guarantee creates a risk to see this field's value be subject to
  // tearing even if the field's class was declared atomic (non loosely consistent).

  if (!UseFieldFlattening) {
    // Global switch: flattening disabled entirely
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_static()) {
    assert(inline_layout_info_array == nullptr ||
               inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr,
           "Static fields do not have inline layout info");
    // don't flatten static fields
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    // Null-free field: no null-marker needed, only the atomicity requirements
    // of the value class have to be honored by the chosen layout
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
      // Naturally atomic values can use the non-atomic layout even when atomicity is required
      if (vk->is_naturally_atomic() && vk->has_null_free_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
      return (vk->has_null_free_atomic_layout() && can_use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return vk->has_null_free_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // To preserve the consistency between the null-marker and the field content, the NULLABLE_NON_ATOMIC_FLAT
    // can only be used in containers that have atomicity guarantees (can_use_atomic_flat argument set to true)
    if (field_info.access_flags().is_strict() && field_info.access_flags().is_final() && can_use_atomic_flat) {
      if (vk->has_nullable_non_atomic_layout()) return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    // Another special case where NULLABLE_NON_ATOMIC_FLAT can be used: nullable empty values, because the
    // payload of those values contains only the null-marker
    if (vk->is_empty_inline_type() && vk->has_nullable_non_atomic_layout()) {
      return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    if (UseNullableAtomicValueFlattening && vk->has_nullable_atomic_layout()) {
      return can_use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
 105 
 106 static bool field_is_inlineable(FieldInfo fieldinfo, LayoutKind lk, Array<InlineLayoutInfo>* ili) {
 107   if (fieldinfo.field_flags().is_null_free_inline_type()) {
 108     // A null-free inline type is always inlineable
 109     return true;
 110   }
 111 
 112   if (lk != LayoutKind::REFERENCE) {
 113     assert(lk != LayoutKind::BUFFERED, "Sanity check");
 114     assert(lk != LayoutKind::UNKNOWN, "Sanity check");
 115     // We've chosen a layout that isn't a normal reference
 116     return true;
 117   }
 118 
 119   const int field_index = (int)fieldinfo.index();
 120   if (!fieldinfo.field_flags().is_injected() &&
 121       ili != nullptr &&
 122       ili->adr_at(field_index)->klass() != nullptr &&
 123       !ili->adr_at(field_index)->klass()->is_identity_class() &&
 124       !ili->adr_at(field_index)->klass()->is_abstract()) {
 125     // The field's klass is not an identity class or abstract
 126     return true;
 127   }
 128 
 129   return false;
 130 }
 131 
// Constructor for blocks that do not describe a declared field
// (EMPTY, RESERVED, PADDING, INHERITED and NULL_MARKER blocks):
// the field index is set to -1 and the alignment defaults to 1 byte.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),    // offset is assigned later, when the block is placed in a layout
  _alignment(1),
  _size(size),
  _field_index(-1) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
 146 
 147 
// Constructor for blocks backed by an entry in the FieldInfo array
// (REGULAR, FLAT and INHERITED blocks): these blocks carry the field's
// index plus explicit size and alignment requirements.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
 _next_block(nullptr),
 _prev_block(nullptr),
 _inline_klass(nullptr),
 _block_kind(kind),
 _layout_kind(LayoutKind::UNKNOWN),
 _offset(-1),    // offset is assigned later, when the block is placed in a layout
 _alignment(alignment),
 _size(size),
 _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
 163 
 164 bool LayoutRawBlock::fit(int size, int alignment) {
 165   int adjustment = 0;
 166   if ((_offset % alignment) != 0) {
 167     adjustment = alignment - (_offset % alignment);
 168   }
 169   return _size >= size + adjustment;
 170 }
 171 
// Builds an empty field group; the three field lists (small primitives,
// big primitives, oops) are allocated lazily when the first field is added.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
 179 
 180 void FieldGroup::add_primitive_field(int idx, BasicType type) {
 181   int size = type2aelembytes(type);
 182   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
 183   if (size >= heapOopSize) {
 184     add_to_big_primitive_list(block);
 185   } else {
 186     add_to_small_primitive_list(block);
 187   }
 188 }
 189 
 190 void FieldGroup::add_oop_field(int idx) {
 191   int size = type2aelembytes(T_OBJECT);
 192   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
 193   if (_oop_fields == nullptr) {
 194     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 195   }
 196   _oop_fields->append(block);
 197   _oop_count++;
 198 }
 199 
 200 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk) {
 201   const int size = vk->layout_size_in_bytes(lk);
 202   const int alignment = vk->layout_alignment(lk);
 203 
 204   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
 205   block->set_inline_klass(vk);
 206   block->set_layout_kind(lk);
 207   if (block->size() >= heapOopSize) {
 208     add_to_big_primitive_list(block);
 209   } else {
 210     assert(!vk->contains_oops(), "Size of Inline klass with oops should be >= heapOopSize");
 211     add_to_small_primitive_list(block);
 212   }
 213 }
 214 
 215 void FieldGroup::sort_by_size() {
 216   if (_small_primitive_fields != nullptr) {
 217     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 218   }
 219   if (_big_primitive_fields != nullptr) {
 220     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 221   }
 222 }
 223 
 224 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 225   if (_small_primitive_fields == nullptr) {
 226     _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 227   }
 228   _small_primitive_fields->append(block);
 229 }
 230 
 231 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 232   if (_big_primitive_fields == nullptr) {
 233     _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 234   }
 235   _big_primitive_fields->append(block);
 236 }
 237 
// Builds an empty FieldLayout; the block list itself is created later by
// initialize_static_layout() or initialize_instance_layout().
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),  // relies on _blocks being initialized just above (declaration order)
  _last(_blocks),
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_nonstatic_fields(false),
  _has_inherited_fields(false) {}
 252 
 253 void FieldLayout::initialize_static_layout() {
 254   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 255   _blocks->set_offset(0);
 256   _last = _blocks;
 257   _start = _blocks;
 258   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
 259   // during bootstrapping, the size of the java.lang.Class is still not known when layout
 260   // of static field is computed. Field offsets are fixed later when the size is known
 261   // (see java_lang_Class::fixup_mirror())
 262   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 263     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 264     _blocks->set_offset(0);
 265   }
 266 }
 267 
 268 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
 269   if (super_klass == nullptr) {
 270     super_ends_with_oop = false;
 271     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 272     _blocks->set_offset(0);
 273     _last = _blocks;
 274     _start = _blocks;
 275     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 276   } else {
 277     reconstruct_layout(super_klass, _super_has_nonstatic_fields, super_ends_with_oop);
 278     fill_holes(super_klass);
 279     if ((!super_klass->has_contended_annotations()) || !_super_has_nonstatic_fields) {
 280       _start = _blocks;  // start allocating fields from the first empty block
 281     } else {
 282       _start = _last;    // append fields at the end of the reconstructed layout
 283     }
 284   }
 285 }
 286 
 287 LayoutRawBlock* FieldLayout::first_field_block() {
 288   LayoutRawBlock* block = _blocks;
 289   while (block != nullptr
 290          && block->block_kind() != LayoutRawBlock::INHERITED
 291          && block->block_kind() != LayoutRawBlock::REGULAR
 292          && block->block_kind() != LayoutRawBlock::FLAT
 293          && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 294     block = block->next_block();
 295   }
 296   return block;
 297 }
 298 
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous search: if consecutive fields have identical
  // size/alignment requirements, a failed search does not need to be repeated
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      // Search backward from the last block toward start, keeping the
      // smallest fitting empty slot (best fit strategy)
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      assert(start->block_kind() != LayoutRawBlock::EMPTY, "");
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting empty slot found: append at the end of the layout
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
 352 
// Used for classes with hard coded field offsets: insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  // Walk the layout to find the slot covering the requested offset
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Keep the gap before the requested offset as a separate EMPTY block
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The field consumed the whole slot, drop the now empty leftover
        remove(slot);
      }
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        // Publish the final offset to the field's FieldInfo entry
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
 384 
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    // Nothing between start and the end of the layout: append the whole set
    candidate = last_block();
  } else {
    // Search backward for an empty slot big enough for the whole set,
    // falling back to the last block if none is found before start
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert the fields one after the other in the selected slot
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
 424 
// Inserts a single field block into the given EMPTY slot, first inserting an
// EMPTY adjustment block if the slot's offset does not satisfy the field's
// alignment. Records the field's final offset (and the null-marker offset of
// nullable flat fields) in the FieldInfo/InlineLayoutInfo side tables.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    // Pad the slot so the field starts at a correctly aligned offset
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
  insert(slot, block);
  if (slot->size() == 0) {
    // The field consumed the whole slot, drop the now empty leftover
    remove(slot);
  }
  // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
  if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
    _field_info->adr_at(block->field_index())->set_offset(block->offset());
    // Remember the offsets of the two injected fields recognized by name
    if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
      _null_reset_value_offset = block->offset();
    }
    if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
      _acmp_maps_offset = block->offset();
    }
  }
  if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
    // Translate the null-marker offset from payload-relative to container-relative
    int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
    _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
    _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
  }

  return block;
}
 455 
 456 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_nonstatic_fields, bool& ends_with_oop) {
 457   has_nonstatic_fields = ends_with_oop = false;
 458   if (ik->is_abstract() && !ik->is_identity_class()) {
 459     _super_alignment = type2aelembytes(BasicType::T_LONG);
 460   }
 461   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 462   BasicType last_type;
 463   int last_offset = -1;
 464   while (ik != nullptr) {
 465     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 466       BasicType type = Signature::basic_type(fs.signature());
 467       // distinction between static and non-static fields is missing
 468       if (fs.access_flags().is_static()) continue;
 469       has_nonstatic_fields = true;
 470       _has_inherited_fields = true;
 471       if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
 472         _super_first_field_offset = fs.offset();
 473       }
 474       LayoutRawBlock* block;
 475       if (fs.is_flat()) {
 476         InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
 477         InlineKlass* vk = layout_info.klass();
 478         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
 479                                    vk->layout_size_in_bytes(layout_info.kind()),
 480                                    vk->layout_alignment(layout_info.kind()));
 481         assert(_super_alignment == -1 || _super_alignment >=  vk->payload_alignment(), "Invalid value alignment");
 482         _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
 483       } else {
 484         int size = type2aelembytes(type);
 485         // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 486         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
 487         // For primitive types, the alignment is equal to the size
 488         assert(_super_alignment == -1 || _super_alignment >=  size, "Invalid value alignment");
 489         _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
 490       }
 491       if (fs.offset() > last_offset) {
 492         last_offset = fs.offset();
 493         last_type = type;
 494       }
 495       block->set_offset(fs.offset());
 496       all_fields->append(block);
 497     }
 498     ik = ik->super() == nullptr ? nullptr : ik->super();
 499   }
 500   assert(last_offset == -1 || last_offset > 0, "Sanity");
 501   if (last_offset > 0 &&
 502       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 503     ends_with_oop = true;
 504   }
 505 
 506   all_fields->sort(LayoutRawBlock::compare_offset);
 507   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 508   _blocks->set_offset(0);
 509   _last = _blocks;
 510   for(int i = 0; i < all_fields->length(); i++) {
 511     LayoutRawBlock* b = all_fields->at(i);
 512     _last->set_next_block(b);
 513     b->set_prev_block(_last);
 514     _last = b;
 515   }
 516   _start = _blocks;
 517 }
 518 
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // Gap between two consecutive blocks: fill it with an EMPTY/PADDING block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty blocks were tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block where new fields can be appended
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
 563 
// Inserts block at the beginning of the EMPTY slot: the slot is shrunk by
// the block's size and shifted accordingly. The caller must guarantee the
// alignment and available space; the slot is kept in the list even when its
// remaining size is zero (callers use remove() to drop empty leftovers).
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link block into the list just before slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  // Keep the head and start pointers consistent if slot was one of them
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
 586 
// Unlinks block from the doubly linked list of blocks. The trailing block
// (_last) is never removed. Note: block is only unlinked, not destroyed,
// so its offset and size remain readable by the caller afterwards.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head of the list
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    // Keep _start valid by moving it to the predecessor of the removed block
    _start = block->prev_block();
  }
}
 604 
// Shifts every field block of the layout up by "shift" bytes. The space is
// taken by growing the block preceding the first field when it is EMPTY, or
// by inserting a new PADDING block otherwise. All recorded field offsets
// (including null-marker offsets of nullable flat fields) are updated.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  assert(b != nullptr, "shift_fields must not be called if layout has no fields");
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    previous->set_size(previous->size() + shift);
  } else {
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  // Walk the remaining blocks and shift their offsets (and side-table entries)
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
 633 
 634 LayoutRawBlock* FieldLayout::find_null_marker() {
 635   LayoutRawBlock* b = _blocks;
 636   while (b != nullptr) {
 637     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 638       return b;
 639     }
 640     b = b->next_block();
 641   }
 642   ShouldNotReachHere();
 643   return nullptr;
 644 }
 645 
// Removes the NULL_MARKER block from the layout. The freed space is either
// merged into the following EMPTY block, or the marker block itself is
// downgraded to an EMPTY block. Fatal if no null marker is present.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // Merge the marker's space into the following empty block.
        // Note: remove() only unlinks b, so b's offset/size stay readable below.
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
 664 
// Prints a human-readable dump of this layout to 'output', one line per block,
// in the form " @offset KIND size/alignment" followed by name/signature details
// when the block maps to a field.
// - is_static: true when printing the static field layout (INHERITED blocks are
//   asserted not to occur in that case)
// - super: super class chain used to resolve the names of INHERITED blocks;
//   must be non-null if such blocks are present
// - inline_fields: per-field inline layout info, used to describe FLAT blocks
// - dummy_field_is_reused_as_null_marker: when true, the injected ".empty"
//   dummy field is annotated as being reused as the null marker
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields, bool dummy_field_is_reused_as_null_marker) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {  // _last is the trailing block; it is not printed
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print(" @%d %s %d/%d \"%s\" %s",
                      b->offset(),
                      "REGULAR",
                      b->size(),
                      b->alignment(),
                      fi->name(_cp)->as_C_string(),
                      fi->signature(_cp)->as_C_string());

        if (dummy_field_is_reused_as_null_marker) {
          // The dummy field is identified by its injected name (empty_marker_name)
          const bool is_dummy_field = fi->name(_cp)->fast_compare(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(empty_marker_name))) == 0;
          if (is_dummy_field) {
            output->print(" (reused as null-marker)");
          }
        }

        output->cr();
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(),
                         LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        // RESERVED blocks have no meaningful alignment, printed as "-"
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        // Walk up the super class chain looking for the (non-static) field
        // declared at this block's offset so its name/signature can be printed.
        bool found = false;
        const InstanceKlass* ik = super;
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                  b->offset(),
                  "INHERITED",
                  b->size(),
                  b->alignment(),
                  fs.name()->as_C_string(),
                  fs.signature()->as_C_string());
              found = true;
              break;
            }
        }
        ik = ik->super();
      }
      break;
    }
    case LayoutRawBlock::EMPTY:
      output->print_cr(" @%d %s %d/1",
                       b->offset(),
                      "EMPTY",
                       b->size());
      break;
    case LayoutRawBlock::PADDING:
      output->print_cr(" @%d %s %d/1",
                      b->offset(),
                      "PADDING",
                      b->size());
      break;
    case LayoutRawBlock::NULL_MARKER:
    {
      output->print_cr(" @%d %s %d/1 ",
                      b->offset(),
                      "NULL_MARKER",
                      b->size());
      break;
    }
    default:
      fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
 762 
// Constructs a FieldLayoutBuilder with every computed result set to its
// "not yet computed" value: -1 for offsets/sizes/alignments, 0 for counters,
// false for derived flags. The layouts themselves are produced later by
// compute_regular_layout() or compute_inline_class_layout().
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  // Inputs captured as-is
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  // Field groups and layouts, allocated in prologue()
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),  // small initial capacity; grows on demand
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  // Payload/layout results: -1 means "not computed" or "layout not supported"
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _null_free_non_atomic_layout_size_in_bytes(-1),
  _null_free_non_atomic_layout_alignment(-1),
  _null_free_atomic_layout_size_in_bytes(-1),
  _nullable_atomic_layout_size_in_bytes(-1),
  _nullable_non_atomic_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_nonstatic_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inlineable_fields(false),
  _has_inlined_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _is_empty_inline_class(false) {}
 800 
 801 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 802   assert(g > 0, "must only be called for named contended groups");
 803   FieldGroup* fg = nullptr;
 804   for (int i = 0; i < _contended_groups.length(); i++) {
 805     fg = _contended_groups.at(i);
 806     if (fg->contended_group() == g) return fg;
 807   }
 808   fg = new FieldGroup(g);
 809   _contended_groups.append(fg);
 810   return fg;
 811 }
 812 
 813 void FieldLayoutBuilder::prologue() {
 814   _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 815   const InstanceKlass* super_klass = _super_klass;
 816   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
 817   _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
 818   if (super_klass != nullptr) {
 819     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 820   }
 821   _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 822   _static_layout->initialize_static_layout();
 823   _static_fields = new FieldGroup();
 824   _root_group = new FieldGroup();
 825 }
 826 
 827 // Field sorting for regular (non-inline) classes:
//   - fields are sorted into static and non-static fields
 829 //   - non-static fields are also sorted according to their contention group
 830 //     (support of the @Contended annotation)
 831 //   - @Contended annotation is ignored for static fields
 832 //   - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;  // index of the current field within _field_info
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;  // @Contended is ignored for static fields
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Anonymous contended group: each such field gets its own group
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          // Named contended group: shared by all fields with the same group id
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
    case T_BYTE:
    case T_CHAR:
    case T_DOUBLE:
    case T_FLOAT:
    case T_INT:
    case T_LONG:
    case T_SHORT:
    case T_BOOLEAN:
      group->add_primitive_field(idx, type);
      break;
    case T_OBJECT:
    case T_ARRAY:
    {
      // Regular classes are never flattened into other containers themselves,
      // so atomic flat layouts are always allowed here (can_use_atomic_flat == true)
      LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);

      if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
        _has_inlineable_fields = true;
      }

      if (lk == LayoutKind::REFERENCE) {
        // Stored as an oop; static oops don't contribute to the instance oop maps
        if (group != _static_fields) _nonstatic_oopmap_count++;
        group->add_oop_field(idx);
      } else {
        assert(group != _static_fields, "Static fields are not flattened");
        assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
               "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

        const int field_index = (int)fieldinfo.index();
        assert(_inline_layout_info_array != nullptr, "Array must have been created");
        assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
        _has_inlined_fields = true;
        InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
        group->add_flat_field(idx, vk, lk);
        // Record the chosen layout kind and mark the field as flat
        _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
        _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
        _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
        _field_info->adr_at(idx)->set_layout_kind(lk);
        // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
      }
      break;
    }
    default:
      fatal("Something wrong?");
    }
  }
  // Sort each group biggest-first so allocation packs fields tightly
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
 910 
 911 /* Field sorting for inline classes:
 912  *   - because inline classes are immutable, the @Contended annotation is ignored
 913  *     when computing their layout (with only read operation, there's no false
 914  *     sharing issue)
 915  *   - this method also records the alignment of the field with the most
 916  *     constraining alignment, this value is then used as the alignment
 917  *     constraint when flattening this inline type into another container
 918  *   - field flattening decisions are taken in this method (those decisions are
 *     currently only based on the size of the fields to be flattened, the size
 920  *     of the resulting instance is not considered)
 921  */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  int alignment = -1;  // most constraining alignment seen among non-static fields
  int idx = 0;         // index of the current field within _field_info
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      // @Contended is ignored for inline classes (immutable => no false sharing)
      _has_nonstatic_fields = true;
      _declared_nonstatic_fields_count++;
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
    case T_BYTE:
    case T_CHAR:
    case T_DOUBLE:
    case T_FLOAT:
    case T_INT:
    case T_LONG:
    case T_SHORT:
    case T_BOOLEAN:
      if (group != _static_fields) {
        field_alignment = type2aelembytes(type); // alignment == size for primitive types
      }
      group->add_primitive_field(idx, type);
      break;
    case T_OBJECT:
    case T_ARRAY:
    {
      bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
      LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);

      if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
        _has_inlineable_fields = true;
      }

      if (lk == LayoutKind::REFERENCE) {
        if (group != _static_fields) {
          _nonstatic_oopmap_count++;
          field_alignment = type2aelembytes(type); // alignment == size for oops
        }
        group->add_oop_field(idx);
      } else {
        assert(group != _static_fields, "Static fields are not flattened");
        assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
               "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

        const int field_index = (int)fieldinfo.index();
        assert(_inline_layout_info_array != nullptr, "Array must have been created");
        assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
        _has_inlined_fields = true;
        InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
        // Track fields whose atomicity is not guaranteed by the hardware for free;
        // this feeds the "naturally atomic" decision in compute_inline_class_layout()
        if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
        group->add_flat_field(idx, vk, lk);
        _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
        _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
        // Flat fields impose the alignment of their own payload layout
        field_alignment = vk->layout_alignment(lk);
        _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
        _field_info->adr_at(idx)->set_layout_kind(lk);
      }
      break;
    }
    default:
      fatal("Unexpected BasicType");
    }
    // Only non-static fields contribute to the payload alignment
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
 999 
1000 LayoutRawBlock* FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
1001   LayoutRawBlock* padding = nullptr;
1002   if (ContendedPaddingWidth > 0) {
1003     padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
1004     _layout->insert(slot, padding);
1005   }
1006   return padding;
1007 }
1008 
1009 // Computation of regular classes layout is an evolution of the previous default layout
1010 // (FieldAllocationStyle 1):
1011 //   - primitive fields (both primitive types and flat inline types) are allocated
1012 //     first (from the biggest to the smallest)
1013 //   - oop fields are allocated, either in existing gaps or at the end of
1014 //     the layout. We allocate oops in a single block to have a single oop map entry.
1015 //   - if the super class ended with an oop, we lead with oops. That will cause the
1016 //     trailing oop map entry of the super class and the oop map entry of this class
1017 //     to be folded into a single entry later. Correspondingly, if the super class
1018 //     ends with a primitive field, we gain nothing by leading with oops; therefore
1019 //     we let oop fields trail, thus giving future derived classes the chance to apply
1020 //     the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    // Class-level @Contended: pad before the first declared field
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    _layout->set_start(_layout->last_block());
    LayoutRawBlock* padding = insert_contended_padding(_layout->start());
    if (padding != nullptr) {
      // Setting the padding block as start ensures we do not insert past it.
      _layout->set_start(padding);
    }
    need_tail_padding = true;
  }

  // Allocation order of the root group: see the comment above this method.
  // Leading with oops lets this class' oop block fold with the super class'
  // trailing oop map entry.
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each contended group is laid out behind its own padding block
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      LayoutRawBlock* padding = insert_contended_padding(start);

      // Do not insert fields past the padding block.
      if (padding != nullptr) {
        start = padding;
      }

      _layout->add(cg->big_primitive_fields(), start);
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  // Trailing padding isolates the last contended group from the next object
  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1076 
1077 /* Computation of inline classes has a slightly different strategy than for
1078  * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to improve GC performance. Unfortunately, this strategy
1080  * increases the number of empty slots inside an instance. Because the purpose
1081  * of inline classes is to be embedded into other containers, it is critical
1082  * to keep their size as small as possible. For this reason, the allocation
1083  * strategy is:
1084  *   - big primitive fields (primitive types and flat inline types larger
1085  *     than an oop) are allocated first (from the biggest to the smallest)
1086  *   - then oop fields
1087  *   - then small primitive fields (from the biggest to the smallest)
1088  */
// Computes the instance layout of an inline (value) class, then derives the
// characteristics of the additional flat layouts (null-free non-atomic,
// null-free atomic, nullable atomic/non-atomic) that the VM may use when this
// class is flattened into other containers. See the comment above for the
// allocation strategy.
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_nonstatic_fields = false;
    for (FieldInfo fieldinfo : *_field_info) {
      if (!fieldinfo.access_flags().is_static()) {
        declares_nonstatic_fields = true;
        break;
      }
    }

    if (!declares_nonstatic_fields) {
      bool has_inherited_fields = _super_klass != nullptr && _super_klass->has_nonstatic_fields();
      if (!has_inherited_fields) {
        // Inject ".empty" dummy field
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                    (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                    (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                    0,
                    fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (!_layout->super_has_nonstatic_fields()) {
    // No inherited fields, the layout must be empty except for the RESERVED block
    // PADDING is inserted if needed to ensure the correct alignment of the payload.
    if (_is_abstract_value && _has_nonstatic_fields) {
      // non-static fields of the abstract class must be laid out without knowing
      // the alignment constraints of the fields of the sub-classes, so the worst
      // case scenario is assumed, which is currently the alignment of T_LONG.
      // PADDING is added if needed to ensure the payload will respect this alignment.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad up to the next _payload_alignment boundary
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  } else { // the class has inherited some fields from its super(s)
    if (!_is_abstract_value) {
      // This is the step where the layout of the final concrete value class' layout
      // is computed. Super abstract value classes might have been too conservative
      // regarding alignment constraints, but now that the full set of non-static fields is
      // known, compute which alignment to use, then set first allowed field offset

      assert(_has_nonstatic_fields, "Concrete value classes must have at least one field");
      if (_payload_alignment == -1) { // current class declares no local nonstatic fields
        _payload_alignment = _layout->super_min_align_required();
      }

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        // Relax the super's (possibly pessimistic) alignment, but never below
        // the minimum required by the inherited fields themselves
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    } else {
      // Abstract value class inheriting fields, restore the pessimistic alignment
      // constraint (see comment above) and ensure no field will be inserted before
      // the first inherited field.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
      _layout->set_start(_layout->first_field_block());
    }
  }

  // Inline-class allocation order: big primitives, then oops, then small
  // primitives (minimizes internal gaps; see comment above the method)
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // A concrete value class always has at least the dummy field, so only
    // abstract value classes can reach this branch
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if ((!_layout->super_has_nonstatic_fields() && _declared_nonstatic_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_nonstatic_fields() && _super_klass->is_naturally_atomic() && _declared_nonstatic_fields_count == 0)) {
        _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNullFreeNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _null_free_non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _null_free_non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseNullFreeAtomicValueFlattening) {
      // Atomic operations require a power-of-two size
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _null_free_atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layouts: they must include a null marker
    if (UseNullableAtomicValueFlattening || UseNullableNonAtomicValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possible dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a field marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan the payload for the first EMPTY block
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }
      assert(null_marker_offset != -1, "Sanity check");
      // Now that the null marker is there, the size of the nullable layout must be computed
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      if (UseNullableNonAtomicValueFlattening) {
        _nullable_non_atomic_layout_size_in_bytes = new_raw_size;
        _null_marker_offset = null_marker_offset;
        // NOTE(review): this writes the null-free non-atomic alignment from the
        // nullable branch — confirm this is intended and not meant to update a
        // nullable-specific alignment field instead
        _null_free_non_atomic_layout_alignment = _payload_alignment;
      }
      if (UseNullableAtomicValueFlattening) {
        // For the nullable atomic layout, the size must be compatible with the platform capabilities
        int nullable_atomic_size = round_up_power_of_2(new_raw_size);
        if (nullable_atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
          _nullable_atomic_layout_size_in_bytes = nullable_atomic_size;
          _null_marker_offset = null_marker_offset;
        }
      }
      if (_null_marker_offset == -1) { // No nullable layout has been accepted
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
      }
    }
    // If the inline class has an atomic or nullable atomic layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // shifting all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_null_free_atomic_layout() && required_alignment < null_free_atomic_layout_size_in_bytes()) {
      required_alignment = null_free_atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && required_alignment < nullable_atomic_layout_size_in_bytes()) {
      required_alignment = nullable_atomic_layout_size_in_bytes();
    }
    // Distance to the next required_alignment boundary (0 when already aligned)
    int shift = (required_alignment - (first_field->offset() % required_alignment)) % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possible dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        assert(is_aligned(_payload_offset, required_alignment), "Fields should have been shifted to respect the required alignment");
        if (has_nullable_atomic_layout() || has_nullable_non_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          // The null marker moved along with the other fields
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shift impossible (inherited fields): give up on the atomic layouts
        _null_free_atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !has_nullable_non_atomic_layout() && !_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
          _null_marker_offset = -1;
        }
        _nullable_atomic_layout_size_in_bytes = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_atomic_layout_size_in_bytes();
    }
    if (has_nullable_non_atomic_layout() && payload_layout_size_in_bytes() < nullable_non_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_non_atomic_layout_size_in_bytes();
    }

    // if the inline class has a null-free atomic layout, the layout used in heap allocated
    // standalone instances must be at least as big as the atomic layout to allow safe
    // atomic read/write operations
    if (has_null_free_atomic_layout() && payload_layout_size_in_bytes() < null_free_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = null_free_atomic_layout_size_in_bytes();
    }
  }
  // Warning:: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  generate_acmp_maps();
  epilogue();
}
1336 
1337 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1338                 InlineKlass* vklass, int offset) {
1339   int diff = offset - vklass->payload_offset();
1340   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1341   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1342   while (map < last_map) {
1343     nonstatic_oop_maps->add(map->offset() + diff, map->count());
1344     map++;
1345   }
1346 }
1347 
1348 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1349   if (list == nullptr) return;
1350   for (int i = 0; i < list->length(); i++) {
1351     LayoutRawBlock* f = list->at(i);
1352     if (f->block_kind() == LayoutRawBlock::FLAT) {
1353       InlineKlass* vk = f->inline_klass();
1354       assert(vk != nullptr, "Should have been initialized");
1355       if (vk->contains_oops()) {
1356         add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1357       }
1358     }
1359   }
1360 }
1361 
1362 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1363   if (group->oop_fields() != nullptr) {
1364     for (int i = 0; i < group->oop_fields()->length(); i++) {
1365       LayoutRawBlock* b = group->oop_fields()->at(i);
1366       nonstatic_oop_maps->add(b->offset(), 1);
1367     }
1368   }
1369   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1370 }
1371 
1372 static int insert_segment(GrowableArray<AcmpMapSegment>* map, int offset, int size, int last_idx) {
1373   if (map->is_empty()) {
1374     return map->append(AcmpMapSegment(offset, size));
1375   }
1376   int start = map->adr_at(last_idx)->_offset > offset ? 0 : last_idx;
1377   bool inserted = false;
1378   for (int c = start; c < map->length(); c++) {
1379     if (offset == (map->adr_at(c)->_offset + map->adr_at(c)->_size)) {
1380       //contiguous to the last field, can be coalesced
1381       map->adr_at(c)->_size = map->adr_at(c)->_size + size;
1382       inserted = true;
1383       break;  // break out of the for loop
1384     }
1385     if (offset < (map->adr_at(c)->_offset)) {
1386       map->insert_before(c, AcmpMapSegment(offset, size));
1387       last_idx = c;
1388       inserted = true;
1389       break;  // break out of the for loop
1390     }
1391   }
1392   if (!inserted) {
1393     last_idx = map->append(AcmpMapSegment(offset, size));
1394   }
1395   return last_idx;
1396 }
1397 
1398 static int insert_map_at_offset(GrowableArray<AcmpMapSegment>* nonoop_map, GrowableArray<int>* oop_map,
1399                                 const InstanceKlass* ik, int field_offset, int last_idx) {
1400   Array<int>* super_map = ik->acmp_maps_array();
1401   assert(super_map != nullptr, "super class must have an acmp map");
1402   int num_nonoop_field = super_map->at(0);
1403   for (int i = 0; i < num_nonoop_field; i++) {
1404     last_idx = insert_segment(nonoop_map,
1405                               field_offset + super_map->at( i * 2 + 1),
1406                               super_map->at( i * 2 + 2), last_idx);
1407   }
1408   int len = super_map->length();
1409   for (int i = num_nonoop_field * 2 + 1; i < len; i++) {
1410       oop_map->append(field_offset + super_map->at(i));
1411   }
1412   return last_idx;
1413 }
1414 
1415 static void split_after(GrowableArray<AcmpMapSegment>* map, int idx, int head) {
1416   int offset = map->adr_at(idx)->_offset;
1417   int size = map->adr_at(idx)->_size;
1418   if (size <= head) return;
1419   map->adr_at(idx)->_offset = offset + head;
1420   map->adr_at(idx)->_size = size - head;
1421   map->insert_before(idx, AcmpMapSegment(offset, head));
1422 
1423 }
1424 
// Builds the acmp maps for the class being laid out:
//   - _nonoop_acmp_map: sorted <offset, size> segments covering non-oop field bytes
//   - _oop_acmp_map:    offsets of oop fields
// Inherited fields and flat fields contribute the (offset-shifted) maps of their
// declaring klass; local fields are inserted individually. Finally, segments
// are split into pieces whose starting offsets allow well-aligned accesses.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  _nonoop_acmp_map = new GrowableArray<AcmpMapSegment>();
  _oop_acmp_map = new GrowableArray<int>();
  // An empty value class has no field content to map; both maps stay empty
  if (_is_empty_inline_class) return;
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = 0;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) {  // Assumes j.l.Object cannot have fields
    // Import the super class' maps unshifted (field_offset == 0: same object base)
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
        {
          FieldInfo* fi = _field_info->adr_at(b->field_index());
          // Reference-typed fields have descriptors starting with 'L' (class) or '[' (array)
          if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
            _oop_acmp_map->append(b->offset());
          } else {
            // Non-oop case
            last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
          }
          break;
       }
      case LayoutRawBlock::FLAT:
        {
          InlineKlass* vk = b->inline_klass();
          // The flat field's klass recorded its map relative to its payload; shift
          // so those entries land at this field's position in the current layout
          int field_offset = b->offset() - vk->payload_offset();
          last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, field_offset, last_idx);
          if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
            // Nullable flat layouts carry a one-byte null marker that must be compared too
            int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
            last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
            // Important note: the implementation assumes that for nullable flat fields, if the
            // null marker is zero (field is null), then all the fields of the flat field are also
            // zeroed. So, nullable flat field are not encoded different than null-free flat fields,
            // all fields are included in the map, plus the null marker
            // If it happens that the assumption above is wrong, then nullable flat fields would
            // require a dedicated section in the acmp map, and be handled differently: null_marker
            // comparison first, and if null markers are identical and non-zero, then conditional
            // comparison of the other fields
          }
        }
        break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks
  // Each iteration peels off a head chunk sized by the segment's misalignment
  // modulo 8; the remainder becomes the next entry and is revisited on the
  // following iteration until its start offset is suitably aligned.
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->_offset;
    int size = _nonoop_acmp_map->adr_at(idx)->_size;
    int mod = offset % 8;
    switch (mod) {
      case 0:
        break;  // already 8-byte aligned
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1516 
// Final stage of layout computation: builds the non-static oop maps (inherited
// blocks first, then root and contended groups), computes instance and static
// field sizes, copies the results into _info for InstanceKlass creation,
// cross-checks (ASSERT builds) that FieldInfo offsets match the computed
// blocks, and optionally prints the layout (-XX:+PrintFieldLayout /
// -XX:+PrintInlineLayout).
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Inherited oop map blocks are installed first, locals are appended after
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  nonstatic_oop_maps->compact();

  // End offsets: the instance and static layouts are rounded up to a word
  // boundary; the non-static field end is rounded to heapOopSize instead
  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);  // in words
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inlined_fields = _has_inlined_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  // Layout variants (sizes, alignments, null marker) only exist for concrete inline types
  if (_is_inline_type) {
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_null_free_non_atomic_size_in_bytes = _null_free_non_atomic_layout_size_in_bytes;
    _info->_null_free_non_atomic_alignment = _null_free_non_atomic_layout_alignment;
    _info->_null_free_atomic_layout_size_in_bytes = _null_free_atomic_layout_size_in_bytes;
    _info->_nullable_atomic_layout_size_in_bytes = _nullable_atomic_layout_size_in_bytes;
    _info->_nullable_non_atomic_layout_size_in_bytes = _nullable_non_atomic_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // Acmp maps are needed for both concrete and abstract value classes
  if (_is_inline_type || _is_abstract_value) {
    _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
    _info->_nonoop_acmp_map = _nonoop_acmp_map;
    _info->_oop_acmp_map = _oop_acmp_map;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // The format header is printed only once per VM, before the first layout
  static bool first_layout_print = true;

  if (PrintFieldLayout || (PrintInlineLayout && (_has_inlineable_fields || _is_inline_type || _is_abstract_value))) {
    ResourceMark rm;
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                    _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    const bool dummy_field_is_reused_as_null_marker = _is_empty_inline_class && _null_marker_offset != -1;
    _layout->print(&st, false, _super_klass, _inline_layout_info_array, dummy_field_is_reused_as_null_marker);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array, false);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      st.print_cr("First field offset = %d", _payload_offset);
      // Each layout line below follows the "size/alignment" format
      st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
                  _payload_size_in_bytes, _payload_alignment);
      if (has_null_free_non_atomic_flat_layout()) {
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
                    _null_free_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
      }
      if (has_null_free_atomic_layout()) {
        // NOTE(review): the size is passed for both the size and alignment slots of
        // the "%d/%d" format — presumably atomic layouts are aligned on their own
        // size; confirm this is intentional and not a copy-paste slip
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
                    _null_free_atomic_layout_size_in_bytes, _null_free_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
      }
      if (has_nullable_atomic_layout()) {
        // NOTE(review): same size-as-alignment pattern as the null-free atomic case above
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
                    _nullable_atomic_layout_size_in_bytes, _nullable_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
      }
      if (has_nullable_non_atomic_layout()) {
        // NOTE(review): the alignment printed here is the null-free non-atomic one —
        // confirm the nullable non-atomic layout is meant to share that alignment
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT),
                    _nullable_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT));
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
      st.print("Non-oop acmp map <offset,size>: ");
      for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
        st.print("<%d,%d>, ", _nonoop_acmp_map->at(i)._offset,  _nonoop_acmp_map->at(i)._size);
      }
      st.print_cr("");
      st.print("oop acmp map: ");
      for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
        st.print("%d, ", _oop_acmp_map->at(i));
      }
      st.print_cr("");
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1682 
1683 void FieldLayoutBuilder::build_layout() {
1684   if (_is_inline_type || _is_abstract_value) {
1685     compute_inline_class_layout();
1686   } else {
1687     compute_regular_layout();
1688   }
1689 }