1 /*
   2  * Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "jvm.h"
  30 #include "memory/resourceArea.hpp"
  31 #include "oops/array.hpp"
  32 #include "oops/fieldStreams.inline.hpp"
  33 #include "oops/inlineKlass.inline.hpp"
  34 #include "oops/instanceKlass.inline.hpp"
  35 #include "oops/instanceMirrorKlass.hpp"
  36 #include "oops/klass.inline.hpp"
  37 #include "runtime/fieldDescriptor.inline.hpp"
  38 #include "utilities/powerOfTwo.hpp"
  39 
// Selects the layout kind (reference or one of the flat layouts) to be used for
// a given field, based on VM flags, the field's flags, and the layouts supported
// by the field's value class (if any). Returns LayoutKind::REFERENCE whenever
// flattening is disabled, forbidden, or not possible for this field.
static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
                                         bool can_use_atomic_flat) {

  // The can_use_atomic_flat argument indicates if an atomic flat layout can be used for this field.
  // This argument will be false if the container is a loosely consistent value class. Using an atomic layout
  // in a container that has no atomicity guarantee creates a risk to see this field's value be subject to
  // tearing even if the field's class was declared atomic (non loosely consistent).

  if (!UseFieldFlattening) {
    return LayoutKind::REFERENCE;
  }

  if (field_info.field_flags().is_injected()) {
    // don't flatten injected fields
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_volatile()) {
    // volatile is used as a keyword to prevent flattening
    return LayoutKind::REFERENCE;
  }

  if (field_info.access_flags().is_static()) {
    assert(inline_layout_info_array == nullptr ||
               inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr,
           "Static fields do not have inline layout info");
    // don't flatten static fields
    return LayoutKind::REFERENCE;
  }

  if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
    // field's type is not a known value class, using a reference
    return LayoutKind::REFERENCE;
  }

  InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  InlineKlass* vk = inline_field_info->klass();

  if (field_info.field_flags().is_null_free_inline_type()) {
    // Null-free case: pick between the null-free atomic and non-atomic layouts.
    assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
    if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
      // A naturally atomic value fits in a single memory access, so the
      // non-atomic layout is still tearing-free and can be preferred.
      if (vk->is_naturally_atomic() && vk->has_null_free_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
      return (vk->has_null_free_atomic_layout() && can_use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return vk->has_null_free_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
    }
  } else {
    // To preserve the consistency between the null-marker and the field content, the NULLABLE_NON_ATOMIC_FLAT
    // can only be used in containers that have atomicity guarantees (can_use_atomic_flat argument set to true)
    if (field_info.access_flags().is_strict() && field_info.access_flags().is_final() && can_use_atomic_flat) {
      if (vk->has_nullable_non_atomic_layout()) return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    // Another special case where NULLABLE_NON_ATOMIC_FLAT can be used: nullable empty values, because the
    // payload of those values contains only the null-marker
    if (vk->is_empty_inline_type() && vk->has_nullable_non_atomic_layout()) {
      return LayoutKind::NULLABLE_NON_ATOMIC_FLAT;
    }
    if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
      return can_use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
    } else {
      return LayoutKind::REFERENCE;
    }
  }
}
 104 
 105 static bool field_is_inlineable(FieldInfo fieldinfo, LayoutKind lk, Array<InlineLayoutInfo>* ili) {
 106   if (fieldinfo.field_flags().is_null_free_inline_type()) {
 107     // A null-free inline type is always inlineable
 108     return true;
 109   }
 110 
 111   if (lk != LayoutKind::REFERENCE) {
 112     assert(lk != LayoutKind::BUFFERED, "Sanity check");
 113     assert(lk != LayoutKind::UNKNOWN, "Sanity check");
 114     // We've chosen a layout that isn't a normal reference
 115     return true;
 116   }
 117 
 118   const int field_index = (int)fieldinfo.index();
 119   if (!fieldinfo.field_flags().is_injected() &&
 120       ili != nullptr &&
 121       ili->adr_at(field_index)->klass() != nullptr &&
 122       !ili->adr_at(field_index)->klass()->is_identity_class() &&
 123       !ili->adr_at(field_index)->klass()->is_abstract()) {
 124     // The field's klass is not an identity class or abstract
 125     return true;
 126   }
 127 
 128   return false;
 129 }
 130 
// Constructor for blocks that are not backed by a declared field:
// EMPTY, RESERVED, PADDING, INHERITED and NULL_MARKER blocks.
// Such blocks have no field index (-1) and a neutral alignment of 1.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _layout_kind(LayoutKind::UNKNOWN),
  _offset(-1),        // offset is unknown until the block is placed in a layout
  _alignment(1),
  _size(size),
  _field_index(-1) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
 145 
 146 
// Constructor for blocks backed by a field: 'index' is the index of the field
// in the FieldInfo array, 'alignment' the alignment required by the field's type.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
 _next_block(nullptr),
 _prev_block(nullptr),
 _inline_klass(nullptr),
 _block_kind(kind),
 _layout_kind(LayoutKind::UNKNOWN),
 _offset(-1),        // offset is unknown until the block is placed in a layout
 _alignment(alignment),
 _size(size),
 _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
 162 
 163 bool LayoutRawBlock::fit(int size, int alignment) {
 164   int adjustment = 0;
 165   if ((_offset % alignment) != 0) {
 166     adjustment = alignment - (_offset % alignment);
 167   }
 168   return _size >= size + adjustment;
 169 }
 170 
// A FieldGroup gathers the fields of one allocation group (default, or one
// @Contended group), partitioned into small primitives, big primitives and oops.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),   // lazily allocated on first add
  _big_primitive_fields(nullptr),     // lazily allocated on first add
  _oop_fields(nullptr),               // lazily allocated on first add
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
 178 
 179 void FieldGroup::add_primitive_field(int idx, BasicType type) {
 180   int size = type2aelembytes(type);
 181   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
 182   if (size >= heapOopSize) {
 183     add_to_big_primitive_list(block);
 184   } else {
 185     add_to_small_primitive_list(block);
 186   }
 187 }
 188 
 189 void FieldGroup::add_oop_field(int idx) {
 190   int size = type2aelembytes(T_OBJECT);
 191   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
 192   if (_oop_fields == nullptr) {
 193     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 194   }
 195   _oop_fields->append(block);
 196   _oop_count++;
 197 }
 198 
 199 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk) {
 200   const int size = vk->layout_size_in_bytes(lk);
 201   const int alignment = vk->layout_alignment(lk);
 202 
 203   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
 204   block->set_inline_klass(vk);
 205   block->set_layout_kind(lk);
 206   if (block->size() >= heapOopSize) {
 207     add_to_big_primitive_list(block);
 208   } else {
 209     assert(!vk->contains_oops(), "Size of Inline klass with oops should be >= heapOopSize");
 210     add_to_small_primitive_list(block);
 211   }
 212 }
 213 
 214 void FieldGroup::sort_by_size() {
 215   if (_small_primitive_fields != nullptr) {
 216     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 217   }
 218   if (_big_primitive_fields != nullptr) {
 219     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 220   }
 221 }
 222 
 223 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 224   if (_small_primitive_fields == nullptr) {
 225     _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 226   }
 227   _small_primitive_fields->append(block);
 228 }
 229 
 230 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 231   if (_big_primitive_fields == nullptr) {
 232     _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 233   }
 234   _big_primitive_fields->append(block);
 235 }
 236 
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing a
// layout under construction. All offset/alignment bookkeeping fields start
// at -1 (unknown) and are filled in during layout computation.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),    // _blocks is still nullptr here; set for real in initialize_*_layout()
  _last(_blocks),
  _super_first_field_offset(-1),
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _acmp_maps_offset(-1),
  _super_has_nonstatic_fields(false),
  _has_inherited_fields(false) {}
 251 
// Initializes the layout used for static fields: a single unbounded EMPTY
// block, preceded by a RESERVED block covering the java.lang.Class header
// when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
 266 
// Initializes the layout used for instance fields. Without a super class, the
// layout starts with a RESERVED block covering the object header followed by an
// unbounded EMPTY block. With a super class, the inherited layout is rebuilt
// first and new fields are allocated either into its holes or after its end.
// 'super_ends_with_oop' is an out parameter set by reconstruct_layout().
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
  if (super_klass == nullptr) {
    super_ends_with_oop = false;
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    reconstruct_layout(super_klass, _super_has_nonstatic_fields, super_ends_with_oop);
    fill_holes(super_klass);
    // A @Contended super class must not share cache lines with subclass fields,
    // so in that case new fields are appended after the inherited layout.
    if ((!super_klass->has_contended_annotations()) || !_super_has_nonstatic_fields) {
      _start = _blocks;  // start allocating fields from the first empty block
    } else {
      _start = _last;    // append fields at the end of the reconstructed layout
    }
  }
}
 285 
 286 LayoutRawBlock* FieldLayout::first_field_block() {
 287   LayoutRawBlock* block = _blocks;
 288   while (block != nullptr
 289          && block->block_kind() != LayoutRawBlock::INHERITED
 290          && block->block_kind() != LayoutRawBlock::REGULAR
 291          && block->block_kind() != LayoutRawBlock::FLAT
 292          && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 293     block = block->next_block();
 294   }
 295   return block;
 296 }
 297 
 298 // Insert a set of fields into a layout.
 299 // For each field, search for an empty slot able to fit the field
 300 // (satisfying both size and alignment requirements), if none is found,
 301 // add the field at the end of the layout.
 302 // Fields cannot be inserted before the block specified in the "start" argument
// Inserts each block of 'list' into the layout using a best-fit strategy:
// the smallest EMPTY block that can hold the field (size + alignment) wins;
// if none fits, the field is appended at the end of the layout.
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous field's search, to skip provably-failing searches.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      // Walk backward from the end toward 'start', keeping the smallest fitting hole (best fit).
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No hole fits: append at the end (the trailing block is always a large EMPTY block).
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
 350 
// Used for classes with hard coded field offsets, insert a field at the specified offset
// Used for classes with hard coded field offsets: inserts 'block' at the exact
// given offset, splitting the enclosing EMPTY block if the offsets don't line up.
// Fatal error if no empty slot covers the requested offset.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // A matching slot either contains the requested offset, or is the trailing block.
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Keep the bytes before the requested offset as a separate EMPTY block.
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        remove(slot);
      }
      // Only real fields have a FieldInfo entry to update.
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
 382 
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
// Allocates all blocks of 'list' contiguously, in the first EMPTY slot big
// enough for the whole set (first fit, searching backward from the end); if
// none is found, the set is appended at the end of the layout.
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    // Alignment requirement for the whole set is taken from its first block.
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        // Search exhausted: fall back to appending at the end.
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
 422 
 423 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 424   assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 425   if (slot->offset() % block->alignment() != 0) {
 426     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 427     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 428     insert(slot, adj);
 429   }
 430   assert(block->size() >= block->size(), "Enough space must remain after adjustment");
 431   insert(slot, block);
 432   if (slot->size() == 0) {
 433     remove(slot);
 434   }
 435   // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
 436   if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 437     _field_info->adr_at(block->field_index())->set_offset(block->offset());
 438     if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
 439       _null_reset_value_offset = block->offset();
 440     }
 441     if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
 442       _acmp_maps_offset = block->offset();
 443     }
 444   }
 445   if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
 446     int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
 447     _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 448     _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 449   }
 450 
 451   return block;
 452 }
 453 
 454 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_nonstatic_fields, bool& ends_with_oop) {
 455   has_nonstatic_fields = ends_with_oop = false;
 456   if (ik->is_abstract() && !ik->is_identity_class()) {
 457     _super_alignment = type2aelembytes(BasicType::T_LONG);
 458   }
 459   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 460   BasicType last_type;
 461   int last_offset = -1;
 462   while (ik != nullptr) {
 463     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 464       BasicType type = Signature::basic_type(fs.signature());
 465       // distinction between static and non-static fields is missing
 466       if (fs.access_flags().is_static()) continue;
 467       has_nonstatic_fields = true;
 468       _has_inherited_fields = true;
 469       if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
 470         _super_first_field_offset = fs.offset();
 471       }
 472       LayoutRawBlock* block;
 473       if (fs.is_flat()) {
 474         InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
 475         InlineKlass* vk = layout_info.klass();
 476         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
 477                                    vk->layout_size_in_bytes(layout_info.kind()),
 478                                    vk->layout_alignment(layout_info.kind()));
 479         assert(_super_alignment == -1 || _super_alignment >=  vk->payload_alignment(), "Invalid value alignment");
 480         _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
 481       } else {
 482         int size = type2aelembytes(type);
 483         // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 484         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
 485         // For primitive types, the alignment is equal to the size
 486         assert(_super_alignment == -1 || _super_alignment >=  size, "Invalid value alignment");
 487         _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
 488       }
 489       if (fs.offset() > last_offset) {
 490         last_offset = fs.offset();
 491         last_type = type;
 492       }
 493       block->set_offset(fs.offset());
 494       all_fields->append(block);
 495     }
 496     ik = ik->super() == nullptr ? nullptr : ik->super();
 497   }
 498   assert(last_offset == -1 || last_offset > 0, "Sanity");
 499   if (last_offset > 0 &&
 500       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 501     ends_with_oop = true;
 502   }
 503 
 504   all_fields->sort(LayoutRawBlock::compare_offset);
 505   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 506   _blocks->set_offset(0);
 507   _last = _blocks;
 508   for(int i = 0; i < all_fields->length(); i++) {
 509     LayoutRawBlock* b = all_fields->at(i);
 510     _last->set_next_block(b);
 511     b->set_prev_block(_last);
 512     _last = b;
 513   }
 514   _start = _blocks;
 515 }
 516 
 517 // Called during the reconstruction of a layout, after fields from super
 518 // classes have been inserted. It fills unused slots between inserted fields
 519 // with EMPTY blocks, so the regular field insertion methods would work.
 520 // This method handles classes with @Contended annotations differently
 521 // by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
 522 // fields to interfere with contended fields/classes.
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    // A gap between consecutive blocks becomes an EMPTY (or PADDING) filler block.
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block where tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block where new fields can be appended.
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
 561 
// Inserts 'block' at the beginning of the empty 'slot', linking it before the
// slot and shrinking the slot by the block's size. The slot's offset must
// already satisfy the block's alignment. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Splice 'block' into the doubly-linked list right before 'slot'.
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  // Keep the head and start pointers coherent when inserting before them.
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
 584 
// Unlinks 'block' from the layout's doubly-linked list. The block must not be
// the trailing block (_last), so block->next_block() is always non-null in the
// non-head branch.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head: advance the head pointer.
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  // Keep _start valid if it pointed to the removed block.
  if (block == _start) {
    _start = block->prev_block();
  }
}
 602 
// Shifts every field block of the layout up by 'shift' bytes, absorbing the
// shift into the preceding EMPTY block (or a new PADDING block) and updating
// field offsets and null-marker offsets in the metadata accordingly.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    // Grow the existing empty block to cover the shifted-over bytes.
    previous->set_size(previous->size() + shift);
  } else {
    // No empty block before the fields: insert a PADDING block of 'shift' bytes.
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  // Move every remaining block up and propagate the new offsets to the metadata.
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
 630 
 631 LayoutRawBlock* FieldLayout::find_null_marker() {
 632   LayoutRawBlock* b = _blocks;
 633   while (b != nullptr) {
 634     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 635       return b;
 636     }
 637     b = b->next_block();
 638   }
 639   ShouldNotReachHere();
 640 }
 641 
// Removes the NULL_MARKER block from the layout, either by merging its space
// into the following EMPTY block or by converting it into an EMPTY block.
// Fatal error if no null marker is present.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // Merge: grow the following empty block downward over the marker's bytes.
        // 'b' is unlinked by remove() but still readable here.
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        // No adjacent empty block: simply retype the marker as an empty block.
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
 660 
// Debug/logging helper: prints one line per layout block, giving its offset,
// kind, size/alignment and, when the block maps to a field, the field's name
// and signature (plus value-class details for FLAT blocks).
// 'super' must be non-null when the layout may contain INHERITED blocks;
// 'inline_fields' is only read for FLAT blocks. When
// 'dummy_field_is_reused_as_null_marker' is true, the injected ".empty" dummy
// field is annotated as being reused as the null marker.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields, bool dummy_field_is_reused_as_null_marker) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print(" @%d %s %d/%d \"%s\" %s",
                      b->offset(),
                      "REGULAR",
                      b->size(),
                      b->alignment(),
                      fi->name(_cp)->as_C_string(),
                      fi->signature(_cp)->as_C_string());

        if (dummy_field_is_reused_as_null_marker) {
          // The injected ".empty" dummy field can double as the null marker;
          // annotate it so the printed layout reflects that reuse.
          const bool is_dummy_field = fi->name(_cp)->fast_compare(vmSymbols::symbol_at(VM_SYMBOL_ENUM_NAME(empty_marker_name))) == 0;
          if (is_dummy_field) {
            output->print(" (reused as null-marker)");
          }
        }

        output->cr();
        break;
      }
      case LayoutRawBlock::FLAT: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(),
                         LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk up the super-class chain looking for the non-static field
        // declared at this block's offset, so its name/signature can be shown.
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                  b->offset(),
                  "INHERITED",
                  b->size(),
                  b->alignment(),
                  fs.name()->as_C_string(),
                  fs.signature()->as_C_string());
              found = true;
              break;
            }
        }
        ik = ik->super();
      }
      break;
    }
    case LayoutRawBlock::EMPTY:
      output->print_cr(" @%d %s %d/1",
                       b->offset(),
                      "EMPTY",
                       b->size());
      break;
    case LayoutRawBlock::PADDING:
      output->print_cr(" @%d %s %d/1",
                      b->offset(),
                      "PADDING",
                      b->size());
      break;
    case LayoutRawBlock::NULL_MARKER:
    {
      output->print_cr(" @%d %s %d/1 ",
                      b->offset(),
                      "NULL_MARKER",
                      b->size());
      break;
    }
    default:
      fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
 758 
// Collects all inputs needed to compute the layout of one class and resets
// every layout result to its "not computed yet" value (-1 for sizes, offsets
// and alignments; 0/false for counters and flags). The actual computation is
// performed later by compute_regular_layout() or compute_inline_class_layout().
// NOTE(review): _super_ends_with_oop is read in prologue() but is not part of
// this initializer list — presumably set via an in-class initializer or a
// setter before the layout is computed; verify in the class declaration.
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _null_free_non_atomic_layout_size_in_bytes(-1),
  _null_free_non_atomic_layout_alignment(-1),
  _null_free_atomic_layout_size_in_bytes(-1),
  _nullable_atomic_layout_size_in_bytes(-1),
  _nullable_non_atomic_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_nonstatic_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inlineable_fields(false),
  _has_inlined_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _is_empty_inline_class(false) {}
 796 
 797 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 798   assert(g > 0, "must only be called for named contended groups");
 799   FieldGroup* fg = nullptr;
 800   for (int i = 0; i < _contended_groups.length(); i++) {
 801     fg = _contended_groups.at(i);
 802     if (fg->contended_group() == g) return fg;
 803   }
 804   fg = new FieldGroup(g);
 805   _contended_groups.append(fg);
 806   return fg;
 807 }
 808 
 809 void FieldLayoutBuilder::prologue() {
 810   _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 811   const InstanceKlass* super_klass = _super_klass;
 812   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
 813   _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
 814   if (super_klass != nullptr) {
 815     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 816   }
 817   _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 818   _static_layout->initialize_static_layout();
 819   _static_fields = new FieldGroup();
 820   _root_group = new FieldGroup();
 821 }
 822 
 823 // Field sorting for regular (non-inline) classes:
 824 //   - fields are sorted in static and non-static fields
 825 //   - non-static fields are also sorted according to their contention group
 826 //     (support of the @Contended annotation)
 827 //   - @Contended annotation is ignored for static fields
 828 //   - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Group id 0 means an anonymous contended group: each such field
          // gets its own group so it is padded independently.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          // Named contended group: fields with the same id share the group.
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
    case T_BYTE:
    case T_CHAR:
    case T_DOUBLE:
    case T_FLOAT:
    case T_INT:
    case T_LONG:
    case T_SHORT:
    case T_BOOLEAN:
      group->add_primitive_field(idx, type);
      break;
    case T_OBJECT:
    case T_ARRAY:
    {
      // Flattening decision. can_use_atomic_flat is true here: a regular
      // (non-inline) container does not restrict atomic flat layouts.
      LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);

      if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
        _has_inlineable_fields = true;
      }

      if (lk == LayoutKind::REFERENCE) {
        // Plain reference field: only non-static oops contribute to oop maps.
        if (group != _static_fields) _nonstatic_oopmap_count++;
        group->add_oop_field(idx);
      } else {
        assert(group != _static_fields, "Static fields are not flattened");
        assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
               "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

        const int field_index = (int)fieldinfo.index();
        assert(_inline_layout_info_array != nullptr, "Array must have been created");
        assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
        _has_inlined_fields = true;
        InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
        group->add_flat_field(idx, vk, lk);
        _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
        // A flat field contributes the oop map entries of its value class.
        _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
        _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
        _field_info->adr_at(idx)->set_layout_kind(lk);
        // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
      }
      break;
    }
    default:
      fatal("Something wrong?");
    }
  }
  // Sorting fields by size (biggest first) reduces internal gaps in each group.
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
 906 
 907 /* Field sorting for inline classes:
 908  *   - because inline classes are immutable, the @Contended annotation is ignored
 909  *     when computing their layout (with only read operation, there's no false
 910  *     sharing issue)
 911  *   - this method also records the alignment of the field with the most
 912  *     constraining alignment, this value is then used as the alignment
 913  *     constraint when flattening this inline type into another container
 914  *   - field flattening decisions are taken in this method (those decisions are
 *     currently only based on the size of the fields to be flattened, the size
 916  *     of the resulting instance is not considered)
 917  */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // Tracks the most constraining alignment among declared non-static fields;
  // -1 means "no non-static field seen yet".
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      // @Contended is ignored for inline classes (see comment above), so all
      // non-static fields go to the root group.
      _has_nonstatic_fields = true;
      _declared_nonstatic_fields_count++;
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
    case T_BYTE:
    case T_CHAR:
    case T_DOUBLE:
    case T_FLOAT:
    case T_INT:
    case T_LONG:
    case T_SHORT:
    case T_BOOLEAN:
      if (group != _static_fields) {
        field_alignment = type2aelembytes(type); // alignment == size for primitive types
      }
      group->add_primitive_field(idx, type);
      break;
    case T_OBJECT:
    case T_ARRAY:
    {
      bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
      LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);

      if (field_is_inlineable(fieldinfo, lk, _inline_layout_info_array)) {
        _has_inlineable_fields = true;
      }

      if (lk == LayoutKind::REFERENCE) {
        if (group != _static_fields) {
          _nonstatic_oopmap_count++;
          field_alignment = type2aelembytes(type); // alignment == size for oops
        }
        group->add_oop_field(idx);
      } else {
        assert(group != _static_fields, "Static fields are not flattened");
        assert(lk != LayoutKind::BUFFERED && lk != LayoutKind::UNKNOWN,
               "Invalid layout kind for flat field: %s", LayoutKindHelper::layout_kind_as_string(lk));

        const int field_index = (int)fieldinfo.index();
        assert(_inline_layout_info_array != nullptr, "Array must have been created");
        assert(_inline_layout_info_array->adr_at(field_index)->klass() != nullptr, "Klass must have been set");
        _has_inlined_fields = true;
        InlineKlass* vk = _inline_layout_info_array->adr_at(field_index)->klass();
        // Embedding a non-naturally-atomic value affects whether this class
        // can itself be considered naturally atomic (checked later).
        if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
        group->add_flat_field(idx, vk, lk);
        _inline_layout_info_array->adr_at(field_index)->set_kind(lk);
        _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
        // The flat field's alignment constraint depends on the chosen layout kind.
        field_alignment = vk->layout_alignment(lk);
        _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
        _field_info->adr_at(idx)->set_layout_kind(lk);
      }
      break;
    }
    default:
      fatal("Unexpected BasicType");
    }
    // Only non-static fields constrain the payload alignment.
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
 995 
 996 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 997   if (ContendedPaddingWidth > 0) {
 998     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 999     _layout->insert(slot, padding);
1000   }
1001 }
1002 
1003 // Computation of regular classes layout is an evolution of the previous default layout
1004 // (FieldAllocationStyle 1):
1005 //   - primitive fields (both primitive types and flat inline types) are allocated
1006 //     first (from the biggest to the smallest)
1007 //   - oop fields are allocated, either in existing gaps or at the end of
1008 //     the layout. We allocate oops in a single block to have a single oop map entry.
1009 //   - if the super class ended with an oop, we lead with oops. That will cause the
1010 //     trailing oop map entry of the super class and the oop map entry of this class
1011 //     to be folded into a single entry later. Correspondingly, if the super class
1012 //     ends with a primitive field, we gain nothing by leading with oops; therefore
1013 //     we let oop fields trail, thus giving future derived classes the chance to apply
1014 //     the same trick.
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }

  // Oops lead or trail depending on how the super class layout ended (see the
  // comment above this method): leading with oops lets this class' oop map
  // entry fold into the super class' trailing one.
  if (_super_ends_with_oop) {
    _layout->add(_root_group->oop_fields());
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
  } else {
    _layout->add(_root_group->big_primitive_fields());
    _layout->add(_root_group->small_primitive_fields());
    _layout->add(_root_group->oop_fields());
  }

  // Each contended group is bracketed by padding: one block before its first
  // field (inserted here) and one shared tail block (inserted below).
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1060 
1061 /* Computation of inline classes has a slightly different strategy than for
1062  * regular classes. Regular classes have their oop fields allocated at the end
 * of the layout to increase GC performance. Unfortunately, this strategy
1064  * increases the number of empty slots inside an instance. Because the purpose
1065  * of inline classes is to be embedded into other containers, it is critical
1066  * to keep their size as small as possible. For this reason, the allocation
1067  * strategy is:
1068  *   - big primitive fields (primitive types and flat inline types larger
1069  *     than an oop) are allocated first (from the biggest to the smallest)
1070  *   - then oop fields
1071  *   - then small primitive fields (from the biggest to the smallest)
1072  */
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_nonstatic_fields = false;
    for (FieldInfo fieldinfo : *_field_info) {
      if (!fieldinfo.access_flags().is_static()) {
        declares_nonstatic_fields = true;
        break;
      }
    }

    if (!declares_nonstatic_fields) {
      bool has_inherited_fields = _super_klass != nullptr && _super_klass->has_nonstatic_fields();
      if (!has_inherited_fields) {
        // Inject ".empty" dummy field so the payload is never zero-sized
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                    (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                    (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                    0,
                    fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (!_layout->super_has_nonstatic_fields()) {
    // No inherited fields, the layout must be empty except for the RESERVED block
    // PADDING is inserted if needed to ensure the correct alignment of the payload.
    if (_is_abstract_value && _has_nonstatic_fields) {
      // non-static fields of the abstract class must be laid out without knowing
      // the alignment constraints of the fields of the sub-classes, so the worst
      // case scenario is assumed, which is currently the alignment of T_LONG.
      // PADDING is added if needed to ensure the payload will respect this alignment.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad up to the next aligned offset; the EMPTY block shrinks accordingly
      // and is removed entirely if the padding consumed it.
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  } else { // the class has inherited some fields from its super(s)
    if (!_is_abstract_value) {
      // This is the step where the layout of the final concrete value class' layout
      // is computed. Super abstract value classes might have been too conservative
      // regarding alignment constraints, but now that the full set of non-static fields is
      // known, compute which alignment to use, then set first allowed field offset

      assert(_has_nonstatic_fields, "Concrete value classes must have at least one field");
      if (_payload_alignment == -1) { // current class declares no local nonstatic fields
        _payload_alignment = _layout->super_min_align_required();
      }

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    }
  }

  // Inline class allocation order (see the comment above this method):
  // big primitives first, then oops, then small primitives.
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if ((!_layout->super_has_nonstatic_fields() && _declared_nonstatic_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_nonstatic_fields() && _super_klass->is_naturally_atomic() && _declared_nonstatic_fields_count == 0)) {
        _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _null_free_non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _null_free_non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseAtomicValueFlattening) {
      // Atomic accesses require a power-of-two size within platform limits.
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _null_free_atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layouts: they must include a null marker
    if (UseNullableValueFlattening || UseNullableNonAtomicValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possible dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a field marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }
      assert(null_marker_offset != -1, "Sanity check");
      // Now that the null marker is there, the size of the nullable layout must be computed
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      if (UseNullableNonAtomicValueFlattening) {
        _nullable_non_atomic_layout_size_in_bytes = new_raw_size;
        _null_marker_offset = null_marker_offset;
        // NOTE(review): this assigns the null-FREE non-atomic alignment from
        // inside the NULLABLE non-atomic branch — possibly a copy/paste slip;
        // confirm the intended target field.
        _null_free_non_atomic_layout_alignment = _payload_alignment;
      }
      if (UseNullableValueFlattening) {
        // For the nullable atomic layout, the size must be compatible with the platform capabilities
        int nullable_atomic_size = round_up_power_of_2(new_raw_size);
        if (nullable_atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
          _nullable_atomic_layout_size_in_bytes = nullable_atomic_size;
          _null_marker_offset = null_marker_offset;
        }
      }
      if (_null_marker_offset == -1) { // No nullable layout has been accepted
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
      }
    }
    // If the inline class has an atomic or nullable atomic layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // a shift of all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_null_free_atomic_layout() && required_alignment < null_free_atomic_layout_size_in_bytes()) {
      required_alignment = null_free_atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && required_alignment < nullable_atomic_layout_size_in_bytes()) {
      required_alignment = nullable_atomic_layout_size_in_bytes();
    }
    int shift = first_field->offset() % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        // Shifting is possible: realign all fields (and the null marker, if any).
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possible dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        if (has_nullable_atomic_layout() || has_nullable_non_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shifting is not possible: give up on the atomic layouts.
        _null_free_atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !has_nullable_non_atomic_layout() && !_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
          _null_marker_offset = -1;
        }
        _nullable_atomic_layout_size_in_bytes = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_atomic_layout_size_in_bytes();
    }
    if (has_nullable_non_atomic_layout() && payload_layout_size_in_bytes() < nullable_non_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_non_atomic_layout_size_in_bytes();
    }

    // if the inline class has a null-free atomic layout, the layout used in heap allocated standalone
    // instances must be at least as large as the atomic layout to allow safe read/write atomic
    // operation
    if (has_null_free_atomic_layout() && payload_layout_size_in_bytes() < null_free_atomic_layout_size_in_bytes()) {
      _payload_size_in_bytes = null_free_atomic_layout_size_in_bytes();
    }
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  generate_acmp_maps();
  epilogue();
}
1313 
1314 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1315                 InlineKlass* vklass, int offset) {
1316   int diff = offset - vklass->payload_offset();
1317   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1318   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1319   while (map < last_map) {
1320     nonstatic_oop_maps->add(map->offset() + diff, map->count());
1321     map++;
1322   }
1323 }
1324 
1325 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1326   if (list == nullptr) return;
1327   for (int i = 0; i < list->length(); i++) {
1328     LayoutRawBlock* f = list->at(i);
1329     if (f->block_kind() == LayoutRawBlock::FLAT) {
1330       InlineKlass* vk = f->inline_klass();
1331       assert(vk != nullptr, "Should have been initialized");
1332       if (vk->contains_oops()) {
1333         add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1334       }
1335     }
1336   }
1337 }
1338 
1339 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1340   if (group->oop_fields() != nullptr) {
1341     for (int i = 0; i < group->oop_fields()->length(); i++) {
1342       LayoutRawBlock* b = group->oop_fields()->at(i);
1343       nonstatic_oop_maps->add(b->offset(), 1);
1344     }
1345   }
1346   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1347 }
1348 
1349 static int insert_segment(GrowableArray<Pair<int,int>>* map, int offset, int size, int last_idx) {
1350   if (map->is_empty()) {
1351     return map->append(Pair<int,int>(offset, size));
1352   }
1353   last_idx = last_idx == -1 ? 0 : last_idx;
1354   int start = map->adr_at(last_idx)->first > offset ? 0 : last_idx;
1355   bool inserted = false;
1356   for (int c = start; c < map->length(); c++) {
1357     if (offset == (map->adr_at(c)->first + map->adr_at(c)->second)) {
1358       //contiguous to the last field, can be coalesced
1359       map->adr_at(c)->second = map->adr_at(c)->second + size;
1360       inserted = true;
1361       break;  // break out of the for loop
1362     }
1363     if (offset < (map->adr_at(c)->first)) {
1364       map->insert_before(c, Pair<int,int>(offset, size));
1365       last_idx = c;
1366       inserted = true;
1367       break;  // break out of the for loop
1368     }
1369   }
1370   if (!inserted) {
1371     last_idx = map->append(Pair<int,int>(offset, size));
1372   }
1373   return last_idx;
1374 }
1375 
1376 static int insert_map_at_offset(GrowableArray<Pair<int,int>>* nonoop_map, GrowableArray<int>* oop_map,
1377                                 const InstanceKlass* ik, int offset, int payload_offset, int last_idx) {
1378   oop mirror = ik->java_mirror();
1379   oop array = mirror->obj_field(ik->acmp_maps_offset());
1380   assert(array != nullptr, "Sanity check");
1381   typeArrayOop fmap = (typeArrayOop)array;
1382   typeArrayHandle fmap_h(Thread::current(), fmap);
1383   int nb_nonoop_field = fmap_h->int_at(0);
1384   int field_offset = offset - payload_offset;
1385   for (int i = 0; i < nb_nonoop_field; i++) {
1386     last_idx = insert_segment(nonoop_map,
1387                               field_offset + fmap_h->int_at( i * 2 + 1),
1388                               fmap_h->int_at( i * 2 + 2), last_idx);
1389   }
1390   int len = fmap_h->length();
1391   for (int i = nb_nonoop_field * 2 + 1; i < len; i++) {
1392       oop_map->append(field_offset + fmap_h->int_at(i));
1393   }
1394   return last_idx;
1395 }
1396 
1397 static void split_after(GrowableArray<Pair<int,int>>* map, int idx, int head) {
1398   int offset = map->adr_at(idx)->first;
1399   int size = map->adr_at(idx)->second;
1400   if (size <= head) return;
1401   map->adr_at(idx)->first = offset + head;
1402   map->adr_at(idx)->second = size - head;
1403   map->insert_before(idx, Pair<int,int>(offset, head));
1404 
1405 }
1406 
// Builds the maps used when comparing value objects (acmp): _nonoop_acmp_map collects
// the <offset,size> segments of non-oop payload data, _oop_acmp_map collects the
// offsets of the oop fields. Inherited fields are folded in from the super class'
// previously computed maps, flat fields from the maps of their own InlineKlass.
void FieldLayoutBuilder::generate_acmp_maps() {
  assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");

  // create/initialize current class' maps
  // The Pair<int,int> values in the nonoop_acmp_map represent <offset,size> segments of memory
  _nonoop_acmp_map = new GrowableArray<Pair<int,int>>();
  _oop_acmp_map = new GrowableArray<int>();
  if (_is_empty_inline_class) return;  // empty value class: nothing to compare, maps stay empty
  // last_idx remembers the position of the last insertion in order to speed up the next insertion.
  // Local fields are processed in ascending offset order, so an insertion is very likely be performed
  // next to the previous insertion. However, in some cases local fields and inherited fields can be
  // interleaved, in which case the search of the insertion position cannot depend on the previous insertion.
  int last_idx = -1;
  if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) {  // Assumes j.l.Object cannot have fields
    // Inherited fields: replay the super class' acmp maps with no offset relocation
    last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, 0, last_idx);
  }

  // Processing local fields
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    switch(b->block_kind()) {
      case LayoutRawBlock::RESERVED:
      case LayoutRawBlock::EMPTY:
      case LayoutRawBlock::PADDING:
      case LayoutRawBlock::NULL_MARKER:
      case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
        // skip
        break;

      case LayoutRawBlock::REGULAR:
        {
          FieldInfo* fi = _field_info->adr_at(b->field_index());
          // Signatures starting with "L" or "[" denote references (oops)
          if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
            _oop_acmp_map->append(b->offset());
          } else {
            // Non-oop case
            last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
          }
          break;
       }
      case LayoutRawBlock::FLAT:
        {
          InlineKlass* vk = b->inline_klass();
          // Fold in the flat field's own acmp maps, relocated to this field's offset
          last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, b->offset(), vk->payload_offset(), last_idx);
          if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
            // The one-byte null marker must be compared too
            int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
            last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
            // Important note: the implementation assumes that for nullable flat fields, if the
            // null marker is zero (field is null), then all the fields of the flat field are also
            // zeroed. So, nullable flat field are not encoded different than null-free flat fields,
            // all fields are included in the map, plus the null marker
            // If it happens that the assumption above is wrong, then nullable flat fields would
            // require a dedicated section in the acmp map, and be handled differently: null_marker
            // comparison first, and if null markers are identical and non-zero, then conditional
            // comparison of the other fields
          }
        }
        break;

    }
    b = b->next_block();
  }

  // split segments into well-aligned blocks
  // A segment whose start offset is not 8-byte aligned gets a head of 4, 2 or 1
  // bytes split off (depending on offset % 8). The inserted head lands at the
  // current index, so idx++ moves onto the tail, which is re-examined and split
  // again until its start offset reaches 8-byte alignment.
  int idx = 0;
  while (idx < _nonoop_acmp_map->length()) {
    int offset = _nonoop_acmp_map->adr_at(idx)->first;
    int size = _nonoop_acmp_map->adr_at(idx)->second;
    int mod = offset % 8;
    switch (mod) {
      case 0:
        break;
      case 4:
        split_after(_nonoop_acmp_map, idx, 4);
        break;
      case 2:
      case 6:
        split_after(_nonoop_acmp_map, idx, 2);
        break;
      case 1:
      case 3:
      case 5:
      case 7:
        split_after(_nonoop_acmp_map, idx, 1);
        break;
      default:
        ShouldNotReachHere();
    }
    idx++;
  }
}
1498 
// Final stage of the layout computation: builds the non-static oop maps, computes
// instance and static field sizes, publishes all results into _info (consumed when
// the InstanceKlass is created), verifies block offsets against the field meta-data
// in debug builds, and optionally prints the layout details
// (-XX:+PrintFieldLayout / -XX:+PrintInlineLayout).
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Inherited oop maps come first, local oop fields are added after them
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    // @Contended groups have their own field lists, registered separately
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Compact the collected oop map blocks before publishing them
  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inlined_fields = _has_inlined_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  // Layout data only meaningful for concrete value classes (flat layouts, null marker, ...)
  if (_is_inline_type) {
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_null_free_non_atomic_size_in_bytes = _null_free_non_atomic_layout_size_in_bytes;
    _info->_null_free_non_atomic_alignment = _null_free_non_atomic_layout_alignment;
    _info->_null_free_atomic_layout_size_in_bytes = _null_free_atomic_layout_size_in_bytes;
    _info->_nullable_atomic_layout_size_in_bytes = _nullable_atomic_layout_size_in_bytes;
    _info->_nullable_non_atomic_layout_size_in_bytes = _nullable_non_atomic_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // Acmp maps are needed for both concrete and abstract value classes
  if (_is_inline_type || _is_abstract_value) {
    _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
    _info->_nonoop_acmp_map = _nonoop_acmp_map;
    _info->_oop_acmp_map = _oop_acmp_map;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  // Same verification for the static field layout
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // Header lines of the layout log are printed only once per VM run
  static bool first_layout_print = true;

  if (PrintFieldLayout || (PrintInlineLayout && (_has_inlineable_fields || _is_inline_type || _is_abstract_value))) {
    ResourceMark rm;
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                    _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    const bool dummy_field_is_reused_as_null_marker = _is_empty_inline_class && _null_marker_offset != -1;
    _layout->print(&st, false, _super_klass, _inline_layout_info_array, dummy_field_is_reused_as_null_marker);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array, false);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      st.print_cr("First field offset = %d", _payload_offset);
      st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
                  _payload_size_in_bytes, _payload_alignment);
      if (has_null_free_non_atomic_flat_layout()) {
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
                    _null_free_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
      }
      if (has_null_free_atomic_layout()) {
        // NOTE(review): the size is passed for both the size and the alignment slot of
        // the "%d/%d" format; presumably alignment == size for atomic layouts — confirm
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
                    _null_free_atomic_layout_size_in_bytes, _null_free_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
      }
      if (has_nullable_atomic_layout()) {
        // NOTE(review): same size-as-alignment convention as the null-free atomic case above
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
                    _nullable_atomic_layout_size_in_bytes, _nullable_atomic_layout_size_in_bytes);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
      }
      if (has_nullable_non_atomic_layout()) {
        // NOTE(review): the alignment printed here is the NULL-FREE non-atomic layout's
        // alignment; verify the nullable non-atomic layout is guaranteed to share it
        st.print_cr("%s layout: %d/%d",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT),
                    _nullable_non_atomic_layout_size_in_bytes, _null_free_non_atomic_layout_alignment);
      } else {
        st.print_cr("%s layout: -/-",
                    LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_NON_ATOMIC_FLAT));
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
      st.print("Non-oop acmp map: ");
      for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
        st.print("<%d,%d>, ", _nonoop_acmp_map->at(i).first,  _nonoop_acmp_map->at(i).second);
      }
      st.print_cr("");
      st.print("oop acmp map: ");
      for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
        st.print("%d, ", _oop_acmp_map->at(i));
      }
      st.print_cr("");
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1664 
1665 void FieldLayoutBuilder::build_layout() {
1666   if (_is_inline_type || _is_abstract_value) {
1667     compute_inline_class_layout();
1668   } else {
1669     compute_regular_layout();
1670   }
1671 }