src/hotspot/share/classfile/fieldLayoutBuilder.cpp

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"


  27 #include "jvm.h"
  28 #include "memory/resourceArea.hpp"
  29 #include "oops/array.hpp"
  30 #include "oops/fieldStreams.inline.hpp"
  31 #include "oops/instanceMirrorKlass.hpp"
  32 #include "oops/instanceKlass.inline.hpp"
  33 #include "oops/klass.inline.hpp"

  34 #include "runtime/fieldDescriptor.inline.hpp"

   35 
  36 
  37 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  38   _next_block(nullptr),
  39   _prev_block(nullptr),
  40   _kind(kind),


  41   _offset(-1),
  42   _alignment(1),
  43   _size(size),
  44   _field_index(-1),
  45   _is_reference(false) {
  46   assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
  47          "Otherwise, should use the constructor with a field index argument");
  48   assert(size > 0, "Sanity check");
  49 }
  50 
  51 
  52 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  53  _next_block(nullptr),
  54  _prev_block(nullptr),
  55  _kind(kind),


  56  _offset(-1),
  57  _alignment(alignment),
  58  _size(size),
  59  _field_index(index),
  60  _is_reference(is_reference) {
  61   assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
   62          "Other kinds do not have a field index");
  63   assert(size > 0, "Sanity check");
  64   assert(alignment > 0, "Sanity check");
  65 }
  66 
  67 bool LayoutRawBlock::fit(int size, int alignment) {
  68   int adjustment = 0;
  69   if ((_offset % alignment) != 0) {
  70     adjustment = alignment - (_offset % alignment);
  71   }
  72   return _size >= size + adjustment;
  73 }
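
A quick aside on the fit() check above: a field fits an empty block when the block still has room left after the padding needed to reach the field's alignment. A minimal standalone sketch (the fits() helper and the numbers are made up for illustration; they are not part of this file):

#include <cassert>

// Mirrors the logic of LayoutRawBlock::fit(): pad up to the field's alignment first,
// then see whether the remaining space can hold the field.
static bool fits(int block_offset, int block_size, int field_size, int field_alignment) {
  int adjustment = 0;
  if ((block_offset % field_alignment) != 0) {
    adjustment = field_alignment - (block_offset % field_alignment);
  }
  return block_size >= field_size + adjustment;
}

int main() {
  assert(fits(6, 10, 4, 4));   // 2 bytes of padding plus a 4-byte field fit in 10 bytes
  assert(!fits(6, 5, 4, 4));   // once padding is added, 5 bytes are not enough
  return 0;
}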
  74 
  75 FieldGroup::FieldGroup(int contended_group) :
  76   _next(nullptr),
  77   _primitive_fields(nullptr),

  78   _oop_fields(nullptr),
  79   _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  80   _oop_count(0) {}
  81 
  82 void FieldGroup::add_primitive_field(int idx, BasicType type) {
  83   int size = type2aelembytes(type);
  84   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
  85   if (_primitive_fields == nullptr) {
  86     _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);


  87   }
  88   _primitive_fields->append(block);
  89 }
  90 
  91 void FieldGroup::add_oop_field(int idx) {
  92   int size = type2aelembytes(T_OBJECT);
  93   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
  94   if (_oop_fields == nullptr) {
  95     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
  96   }
  97   _oop_fields->append(block);
  98   _oop_count++;
  99 }
  100 
 101 void FieldGroup::sort_by_size() {
 102   if (_primitive_fields != nullptr) {
  103     _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 104   }

 105 }
 106 
 107 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
 108   _field_info(field_info),

 109   _cp(cp),
 110   _blocks(nullptr),
 111   _start(_blocks),
 112   _last(_blocks) {}






 113 
 114 void FieldLayout::initialize_static_layout() {
 115   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 116   _blocks->set_offset(0);
 117   _last = _blocks;
 118   _start = _blocks;
 119   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  120   // during bootstrapping, the size of java.lang.Class is still not known when the layout
  121   // of static fields is computed. Field offsets are fixed later when the size is known
 122   // (see java_lang_Class::fixup_mirror())
 123   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 124     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 125     _blocks->set_offset(0);
 126   }
 127 }
 128 
 129 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
 130   if (super_klass == nullptr) {
 131     super_ends_with_oop = false;
 132     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 133     _blocks->set_offset(0);
 134     _last = _blocks;
 135     _start = _blocks;
 136     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 137   } else {
 138     bool super_has_instance_fields = false;
 139     reconstruct_layout(super_klass, super_has_instance_fields, super_ends_with_oop);
 140     fill_holes(super_klass);
 141     if (!super_klass->has_contended_annotations() || !super_has_instance_fields) {
 142       _start = _blocks;  // start allocating fields from the first empty block
 143     } else {
 144       _start = _last;    // append fields at the end of the reconstructed layout
 145     }
 146   }
 147 }
 148 
 149 LayoutRawBlock* FieldLayout::first_field_block() {
 150   LayoutRawBlock* block = _start;
 151   while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
 152       && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {



 153     block = block->next_block();
 154   }
 155   return block;
 156 }
 157 
 158 
 159 // Insert a set of fields into a layout using a best-fit strategy.
 160 // For each field, search for the smallest empty slot able to fit the field
  161 // (satisfying both size and alignment requirements); if none is found,
  162 // add the field at the end of the layout.
  163 // Fields cannot be inserted before the block specified in the "start" argument.
 164 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 165   if (list == nullptr) return;
 166   if (start == nullptr) start = this->_start;
 167   bool last_search_success = false;
 168   int last_size = 0;
 169   int last_alignment = 0;
 170   for (int i = 0; i < list->length(); i ++) {
 171     LayoutRawBlock* b = list->at(i);
 172     LayoutRawBlock* cursor = nullptr;
 173     LayoutRawBlock* candidate = nullptr;
 174 
 175     // if start is the last block, just append the field
 176     if (start == last_block()) {
 177       candidate = last_block();
 178     }
 179     // Before iterating over the layout to find an empty slot fitting the field's requirements,
 180     // check if the previous field had the same requirements and if the search for a fitting slot
 181     // was successful. If the requirements were the same but the search failed, a new search will
  182     // fail the same way, so just append the field at the end of the layout.
 183     else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
 184       candidate = last_block();
 185     } else {
 186       // Iterate over the layout to find an empty slot fitting the field's requirements
 187       last_size = b->size();
 188       last_alignment = b->alignment();
 189       cursor = last_block()->prev_block();
 190       assert(cursor != nullptr, "Sanity check");
 191       last_search_success = true;

 192       while (cursor != start) {
 193         if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
 194           if (candidate == nullptr || cursor->size() < candidate->size()) {
 195             candidate = cursor;
 196           }
 197         }
 198         cursor = cursor->prev_block();
 199       }
 200       if (candidate == nullptr) {
 201         candidate = last_block();
 202         last_search_success = false;
 203       }
 204       assert(candidate != nullptr, "Candidate must not be null");
 205       assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 206       assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
 207     }
 208 
 209     insert_field_block(candidate, b);
 210   }
 211 }
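
For readers skimming the list walk above, the same best-fit idea is sketched below over a plain vector of empty slots (Slot and best_fit are hypothetical names for the example; the real code walks the doubly-linked block list backwards and also reuses the outcome of the previous search):

#include <cstdio>
#include <vector>

struct Slot { int offset; int size; };

// Returns the index of the smallest fitting slot, or -1 meaning "append at the end".
static int best_fit(const std::vector<Slot>& empties, int size, int alignment) {
  int best = -1;
  for (int i = 0; i < (int)empties.size(); i++) {
    int rem = empties[i].offset % alignment;
    int adjustment = (rem == 0) ? 0 : alignment - rem;
    if (empties[i].size >= size + adjustment &&
        (best == -1 || empties[i].size < empties[best].size)) {
      best = i;
    }
  }
  return best;
}

int main() {
  std::vector<Slot> empties = { {12, 4}, {20, 8} };
  printf("%d\n", best_fit(empties, 4, 4));  // 0: the smaller 4-byte hole at offset 12 wins
  printf("%d\n", best_fit(empties, 8, 8));  // -1: offset 20 needs 4 bytes of padding, nothing fits
  return 0;
}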
 212 
  213 // Used for classes with hard-coded field offsets: insert a field at the specified offset.
 214 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
 215   assert(block != nullptr, "Sanity check");
 216   block->set_offset(offset);
 217   if (start == nullptr) {
 218     start = this->_start;
 219   }
 220   LayoutRawBlock* slot = start;
 221   while (slot != nullptr) {
 222     if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
 223         slot == _last){
 224       assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
 225       assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
 226       if (slot->offset() < block->offset()) {
 227         int adjustment = block->offset() - slot->offset();
 228         LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 229         insert(slot, adj);
 230       }
 231       insert(slot, block);
 232       if (slot->size() == 0) {
 233         remove(slot);
 234       }
 235       _field_info->adr_at(block->field_index())->set_offset(block->offset());


 236       return;
 237     }
 238     slot = slot->next_block();
 239   }
 240   fatal("Should have found a matching slot above, corrupted layout or invalid offset");
 241 }
 242 
 243 // The allocation logic uses a best fit strategy: the set of fields is allocated
  244 // in the first empty slot big enough to contain the whole set (including padding
 245 // to fit alignment constraints).
 246 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 247   if (list == nullptr) return;
 248   if (start == nullptr) {
 249     start = _start;
 250   }
 251   // This code assumes that if the first block is well aligned, the following
 252   // blocks would naturally be well aligned (no need for adjustment)
 253   int size = 0;
 254   for (int i = 0; i < list->length(); i++) {
 255     size += list->at(i)->size();
 256   }
 257 
 258   LayoutRawBlock* candidate = nullptr;
 259   if (start == last_block()) {
 260     candidate = last_block();
 261   } else {
 262     LayoutRawBlock* first = list->at(0);
 263     candidate = last_block()->prev_block();
 264     while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
 265       if (candidate == start) {
 266         candidate = last_block();
 267         break;
 268       }
 269       candidate = candidate->prev_block();
 270     }
 271     assert(candidate != nullptr, "Candidate must not be null");
 272     assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 273     assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
 274   }
 275 
 276   for (int i = 0; i < list->length(); i++) {
 277     LayoutRawBlock* b = list->at(i);
 278     insert_field_block(candidate, b);
 279     assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
 280   }
 281 }
 282 
 283 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 284   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 285   if (slot->offset() % block->alignment() != 0) {
 286     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 287     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 288     insert(slot, adj);
 289   }

 290   insert(slot, block);
 291   if (slot->size() == 0) {
 292     remove(slot);
 293   }
  294   _field_info->adr_at(block->field_index())->set_offset(block->offset());
 295   return block;
 296 }
 297 
 298 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
 299   has_instance_fields = ends_with_oop = false;



 300   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 301   BasicType last_type;
 302   int last_offset = -1;
 303   while (ik != nullptr) {
 304     for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 305       BasicType type = Signature::basic_type(fs.signature());
 306       // distinction between static and non-static fields is missing
 307       if (fs.access_flags().is_static()) continue;
  308       has_instance_fields = true;
 309       if (fs.offset() > last_offset) {
 310         last_offset = fs.offset();
 311         last_type = type;
 312       }
 313       int size = type2aelembytes(type);
 314       // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 315       LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
 316       block->set_offset(fs.offset());
 317       all_fields->append(block);
 318     }
 319     ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
 320   }
 321   assert(last_offset == -1 || last_offset > 0, "Sanity");
 322   if (last_offset > 0 &&
 323       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 324     ends_with_oop = true;
 325   }
 326 
 327   all_fields->sort(LayoutRawBlock::compare_offset);
 328   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 329   _blocks->set_offset(0);
 330   _last = _blocks;
 331 
 332   for(int i = 0; i < all_fields->length(); i++) {
 333     LayoutRawBlock* b = all_fields->at(i);
 334     _last->set_next_block(b);
 335     b->set_prev_block(_last);
 336     _last = b;
 337   }
 338   _start = _blocks;
 339 }
 340 
 341 // Called during the reconstruction of a layout, after fields from super
 342 // classes have been inserted. It fills unused slots between inserted fields
 343 // with EMPTY blocks, so the regular field insertion methods would work.
 344 // This method handles classes with @Contended annotations differently
  345 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
  346 // fields from interfering with contended fields/classes.
 347 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
 348   assert(_blocks != nullptr, "Sanity check");
 349   assert(_blocks->offset() == 0, "first block must be at offset zero");
 350   LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
 351   LayoutRawBlock* b = _blocks;
 352   while (b->next_block() != nullptr) {
 353     if (b->next_block()->offset() > (b->offset() + b->size())) {
 354       int size = b->next_block()->offset() - (b->offset() + b->size());

 355       LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
 356       empty->set_offset(b->offset() + b->size());
 357       empty->set_next_block(b->next_block());
 358       b->next_block()->set_prev_block(empty);
 359       b->set_next_block(empty);
 360       empty->set_prev_block(b);
 361     }
 362     b = b->next_block();
 363   }
 364   assert(b->next_block() == nullptr, "Invariant at this point");
 365   assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
 366 
 367   // If the super class has @Contended annotation, a padding block is
 368   // inserted at the end to ensure that fields from the subclasses won't share
 369   // the cache line of the last field of the contended class
 370   if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
 371     LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 372     p->set_offset(b->offset() + b->size());
 373     b->set_next_block(p);
 374     p->set_prev_block(b);
 375     b = p;
 376   }
 377 
 378   LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 379   last->set_offset(b->offset() + b->size());
 380   assert(last->offset() > 0, "Sanity check");
 381   b->set_next_block(last);
 382   last->set_prev_block(b);
 383   _last = last;
 384 }
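
The gap detection that fill_holes() performs can be illustrated in isolation; the hypothetical find_holes() below only reports the gaps, whereas the real code splices EMPTY (or PADDING, for @Contended supers) blocks directly into the linked list:

#include <cstdio>
#include <vector>

struct Blk { int offset; int size; };

// Blocks are assumed to be sorted by offset, as they are after reconstruct_layout().
static std::vector<Blk> find_holes(const std::vector<Blk>& blocks) {
  std::vector<Blk> holes;
  for (size_t i = 1; i < blocks.size(); i++) {
    int end_of_prev = blocks[i - 1].offset + blocks[i - 1].size;
    if (blocks[i].offset > end_of_prev) {
      holes.push_back({end_of_prev, blocks[i].offset - end_of_prev});
    }
  }
  return holes;
}

int main() {
  // A 12-byte RESERVED header, then inherited fields at offsets 16 and 24 (made-up numbers).
  std::vector<Blk> layout = { {0, 12}, {16, 8}, {24, 4} };
  for (const Blk& h : find_holes(layout)) {
    printf("hole @%d size %d\n", h.offset, h.size);  // prints: hole @12 size 4
  }
  return 0;
}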
 385 
 386 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
 387   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 388   assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
 389   block->set_offset(slot->offset());
 390   slot->set_offset(slot->offset() + block->size());
 391   assert((slot->size() - block->size()) < slot->size(), "underflow checking");
 392   assert(slot->size() - block->size() >= 0, "no negative size allowed");
 393   slot->set_size(slot->size() - block->size());
 394   block->set_prev_block(slot->prev_block());
 395   block->set_next_block(slot);
 396   slot->set_prev_block(block);
 397   if (block->prev_block() != nullptr) {
 398     block->prev_block()->set_next_block(block);
 399   }
 400   if (_blocks == slot) {
 401     _blocks = block;
 402   }



 403   return block;
 404 }
 405 
 406 void FieldLayout::remove(LayoutRawBlock* block) {
 407   assert(block != nullptr, "Sanity check");
 408   assert(block != _last, "Sanity check");
 409   if (_blocks == block) {
 410     _blocks = block->next_block();
 411     if (_blocks != nullptr) {
 412       _blocks->set_prev_block(nullptr);
 413     }
 414   } else {
 415     assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
 416     block->prev_block()->set_next_block(block->next_block());
 417     block->next_block()->set_prev_block(block->prev_block());
 418   }
 419   if (block == _start) {
 420     _start = block->prev_block();
 421   }
 422 }
 423 
  424 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
 425   ResourceMark rm;
 426   LayoutRawBlock* b = _blocks;
 427   while(b != _last) {
 428     switch(b->kind()) {
 429       case LayoutRawBlock::REGULAR: {
 430         FieldInfo* fi = _field_info->adr_at(b->field_index());
 431         output->print_cr(" @%d \"%s\" %s %d/%d %s",
 432                          b->offset(),
 433                          fi->name(_cp)->as_C_string(),
 434                          fi->signature(_cp)->as_C_string(),
 435                          b->size(),
 436                          b->alignment(),
 437                          "REGULAR");

 438         break;
 439       }
 440       case LayoutRawBlock::FLATTENED: {
 441         FieldInfo* fi = _field_info->adr_at(b->field_index());
 442         output->print_cr(" @%d \"%s\" %s %d/%d %s",


 443                          b->offset(),
 444                          fi->name(_cp)->as_C_string(),
 445                          fi->signature(_cp)->as_C_string(),
 446                          b->size(),
 447                          b->alignment(),
 448                          "FLATTENED");



 449         break;
 450       }
 451       case LayoutRawBlock::RESERVED: {
 452         output->print_cr(" @%d %d/- %s",
 453                          b->offset(),
 454                          b->size(),
 455                          "RESERVED");
 456         break;
 457       }
 458       case LayoutRawBlock::INHERITED: {
 459         assert(!is_static, "Static fields are not inherited in layouts");
 460         assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
 461         bool found = false;
 462         const InstanceKlass* ik = super;
 463         while (!found && ik != nullptr) {
 464           for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 465             if (fs.offset() == b->offset()) {
 466               output->print_cr(" @%d \"%s\" %s %d/%d %s",
 467                   b->offset(),
 468                   fs.name()->as_C_string(),
 469                   fs.signature()->as_C_string(),
 470                   b->size(),
 471                   b->size(), // so far, alignment constraint == size, will change with Valhalla
 472                   "INHERITED");

 473               found = true;
 474               break;
 475             }
 476           }
 477           ik = ik->java_super();
 478         }
 479         break;
 480       }
 481       case LayoutRawBlock::EMPTY:
 482         output->print_cr(" @%d %d/1 %s",
 483                          b->offset(),
 484                          b->size(),
 485                         "EMPTY");
 486         break;
 487       case LayoutRawBlock::PADDING:
 488         output->print_cr(" @%d %d/1 %s",
 489                          b->offset(),
 490                          b->size(),
 491                         "PADDING");
  492         break;
 493     }
 494     b = b->next_block();
 495   }
 496 }
 497 
 498 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
 499       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :

 500   _classname(classname),

 501   _super_klass(super_klass),
 502   _constant_pool(constant_pool),
 503   _field_info(field_info),
 504   _info(info),

 505   _root_group(nullptr),
 506   _contended_groups(GrowableArray<FieldGroup*>(8)),
 507   _static_fields(nullptr),
 508   _layout(nullptr),
 509   _static_layout(nullptr),
 510   _nonstatic_oopmap_count(0),
  511   _alignment(-1),
 512   _has_nonstatic_fields(false),
 513   _is_contended(is_contended) {}
 514 




 515 
 516 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 517   assert(g > 0, "must only be called for named contended groups");
 518   FieldGroup* fg = nullptr;
 519   for (int i = 0; i < _contended_groups.length(); i++) {
 520     fg = _contended_groups.at(i);
 521     if (fg->contended_group() == g) return fg;
 522   }
 523   fg = new FieldGroup(g);
 524   _contended_groups.append(fg);
 525   return fg;
 526 }
 527 
 528 void FieldLayoutBuilder::prologue() {
 529   _layout = new FieldLayout(_field_info, _constant_pool);
 530   const InstanceKlass* super_klass = _super_klass;
 531   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);

 532   if (super_klass != nullptr) {
 533     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 534   }
 535   _static_layout = new FieldLayout(_field_info, _constant_pool);
 536   _static_layout->initialize_static_layout();
 537   _static_fields = new FieldGroup();
 538   _root_group = new FieldGroup();
 539 }
 540 
 541 // Field sorting for regular classes:
  542 //   - fields are sorted into static and non-static fields
  543 //   - non-static fields are also sorted according to their contention group
  544 //     (support for the @Contended annotation)
  545 //   - the @Contended annotation is ignored for static fields

 546 void FieldLayoutBuilder::regular_field_sorting() {
 547   int idx = 0;
 548   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
 549     FieldInfo ctrl = _field_info->at(0);
 550     FieldGroup* group = nullptr;
 551     FieldInfo fieldinfo = *it;
 552     if (fieldinfo.access_flags().is_static()) {
 553       group = _static_fields;
 554     } else {
 555       _has_nonstatic_fields = true;
 556       if (fieldinfo.field_flags().is_contended()) {
 557         int g = fieldinfo.contended_group();
 558         if (g == 0) {
 559           group = new FieldGroup(true);
 560           _contended_groups.append(group);
 561         } else {
 562           group = get_or_create_contended_group(g);
 563         }
 564       } else {
 565         group = _root_group;
 566       }
 567     }
 568     assert(group != nullptr, "invariant");
 569     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 570     switch(type) {
 571       case T_BYTE:
 572       case T_CHAR:
 573       case T_DOUBLE:
 574       case T_FLOAT:
 575       case T_INT:
 576       case T_LONG:
 577       case T_SHORT:
 578       case T_BOOLEAN:
 579         group->add_primitive_field(idx, type);
 580         break;
 581       case T_OBJECT:
  582       case T_ARRAY:
 583         if (group != _static_fields) _nonstatic_oopmap_count++;
 584         group->add_oop_field(idx);
 585         break;
 586       default:
  587         fatal("Something wrong?");
 588     }
 589   }
 590   _root_group->sort_by_size();
 591   _static_fields->sort_by_size();
 592   if (!_contended_groups.is_empty()) {
 593     for (int i = 0; i < _contended_groups.length(); i++) {
 594       _contended_groups.at(i)->sort_by_size();
 595     }
 596   }
 597 }
  598 
 599 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 600   if (ContendedPaddingWidth > 0) {
 601     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 602     _layout->insert(slot, padding);
 603   }
 604 }
 605 
  606 // Computation of the layout of regular classes is an evolution of the previous default layout
 607 // (FieldAllocationStyle 1):
 608 //   - primitive fields are allocated first (from the biggest to the smallest)

 609 //   - oop fields are allocated, either in existing gaps or at the end of
 610 //     the layout. We allocate oops in a single block to have a single oop map entry.
 611 //   - if the super class ended with an oop, we lead with oops. That will cause the
 612 //     trailing oop map entry of the super class and the oop map entry of this class
 613 //     to be folded into a single entry later. Correspondingly, if the super class
 614 //     ends with a primitive field, we gain nothing by leading with oops; therefore
 615 //     we let oop fields trail, thus giving future derived classes the chance to apply
 616 //     the same trick.
 617 void FieldLayoutBuilder::compute_regular_layout() {
 618   bool need_tail_padding = false;
 619   prologue();
 620   regular_field_sorting();
 621 
 622   if (_is_contended) {
 623     _layout->set_start(_layout->last_block());
  624     // Insertion is currently easy because the current strategy doesn't try to fill holes
  625     // in super class layouts, so the _start block is consequently the _last block
 626     insert_contended_padding(_layout->start());
 627     need_tail_padding = true;
 628   }
 629 
 630   if (_super_ends_with_oop) {
 631     _layout->add(_root_group->oop_fields());
 632     _layout->add(_root_group->primitive_fields());

 633   } else {
 634     _layout->add(_root_group->primitive_fields());

 635     _layout->add(_root_group->oop_fields());
 636   }
 637 
 638   if (!_contended_groups.is_empty()) {
 639     for (int i = 0; i < _contended_groups.length(); i++) {
 640       FieldGroup* cg = _contended_groups.at(i);
 641       LayoutRawBlock* start = _layout->last_block();
 642       insert_contended_padding(start);
 643       _layout->add(cg->primitive_fields(), start);

 644       _layout->add(cg->oop_fields(), start);
 645       need_tail_padding = true;
 646     }
 647   }
 648 
 649   if (need_tail_padding) {
 650     insert_contended_padding(_layout->last_block());
 651   }
 652 
 653   _static_layout->add_contiguously(this->_static_fields->oop_fields());
 654   _static_layout->add(this->_static_fields->primitive_fields());


 655 
 656   epilogue();
 657 }
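
To make the "lead with oops" reasoning in the comment above concrete: when the super class ends with oops and this class starts with oops, the two oop runs are adjacent and can be described by a single oop map entry. A small sketch with assumed 8-byte oops and made-up offsets (in the real code the merging happens in OopMapBlocksBuilder::compact(), called from epilogue()):

#include <cstdio>

struct OopMapEntry { int offset; int count; };

// Folds the second run into the first when the two runs are contiguous in memory.
static bool try_fold(OopMapEntry& first, const OopMapEntry& second, int oop_size) {
  if (first.offset + first.count * oop_size == second.offset) {
    first.count += second.count;
    return true;
  }
  return false;
}

int main() {
  OopMapEntry super_run = { 16, 2 };  // super class ends with oops at offsets 16 and 24
  OopMapEntry this_run  = { 32, 3 };  // this class leads with oops at 32, 40 and 48
  if (try_fold(super_run, this_run, 8)) {
    printf("folded: @%d x%d\n", super_run.offset, super_run.count);  // folded: @16 x5
  }
  return 0;
}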
 658 
 659 void FieldLayoutBuilder::epilogue() {
 660   // Computing oopmaps
 661   int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  662   int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
 663 
 664   OopMapBlocksBuilder* nonstatic_oop_maps =
 665       new OopMapBlocksBuilder(max_oop_map_count);
 666   if (super_oop_map_count > 0) {
 667     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
  668     _super_klass->nonstatic_oop_map_count());
 669   }

 670 
 671   if (_root_group->oop_fields() != nullptr) {
 672     for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
  673       LayoutRawBlock* b = _root_group->oop_fields()->at(i);
 674       nonstatic_oop_maps->add(b->offset(), 1);
 675     }
 676   }



  677 
 678   if (!_contended_groups.is_empty()) {
 679     for (int i = 0; i < _contended_groups.length(); i++) {
 680       FieldGroup* cg = _contended_groups.at(i);
 681       if (cg->oop_count() > 0) {
 682         assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
 683         nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
 684       }
 685     }
 686   }
 687 
 688   nonstatic_oop_maps->compact();
 689 
 690   int instance_end = align_up(_layout->last_block()->offset(), wordSize);
 691   int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
 692   int static_fields_size = (static_fields_end -
 693       InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
 694   int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
 695 
 696   // Pass back information needed for InstanceKlass creation
 697 
 698   _info->oop_map_blocks = nonstatic_oop_maps;
 699   _info->_instance_size = align_object_size(instance_end / wordSize);
 700   _info->_static_field_size = static_fields_size;
 701   _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  702   _info->_has_nonstatic_fields = _has_nonstatic_fields;
 703 
 704   if (PrintFieldLayout) {



 705     ResourceMark rm;
 706     tty->print_cr("Layout of class %s", _classname->as_C_string());
 707     tty->print_cr("Instance fields:");
 708     _layout->print(tty, false, _super_klass);
 709     tty->print_cr("Static fields:");
 710     _static_layout->print(tty, true, nullptr);
 711     tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
  712     tty->print_cr("---");
 713   }
 714 }
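
The size bookkeeping at the end of epilogue() is plain alignment arithmetic; here is a worked example under assumed 64-bit values (wordSize == 8, heapOopSize == 4 with compressed oops, a 12-byte object header), with align_object_size() left out for brevity:

#include <cstdio>

// Power-of-two alignments only, like the VM's align_up().
static int align_up(int value, int alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

int main() {
  const int wordSize = 8, heapOopSize = 4, header_size = 12;  // assumed values
  int last_block_offset = 26;                                 // offset of the trailing EMPTY block
  int instance_end = align_up(last_block_offset, wordSize);             // 32
  int instance_size_in_words = instance_end / wordSize;                 // 4
  int nonstatic_field_end = align_up(last_block_offset, heapOopSize);   // 28
  int nonstatic_field_size = (nonstatic_field_end - header_size) / heapOopSize;  // 4
  printf("instance_size = %d words, nonstatic_field_size = %d heap oops\n",
         instance_size_in_words, nonstatic_field_size);
  return 0;
}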
 715 
 716 void FieldLayoutBuilder::build_layout() {
 717   compute_regular_layout();




 718 }

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "jvm.h"
  30 #include "memory/resourceArea.hpp"
  31 #include "oops/array.hpp"
  32 #include "oops/fieldStreams.inline.hpp"
  33 #include "oops/instanceMirrorKlass.hpp"
  34 #include "oops/instanceKlass.inline.hpp"
  35 #include "oops/klass.inline.hpp"
  36 #include "oops/inlineKlass.inline.hpp"
  37 #include "runtime/fieldDescriptor.inline.hpp"
  38 #include "utilities/powerOfTwo.hpp"
  39 
  40 static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
  41                                          bool use_atomic_flat) {
  42 
  43   if (!UseFieldFlattening) {
  44     return LayoutKind::REFERENCE;
  45   }
  46 
  47   if (field_info.field_flags().is_injected()) {
  48     // don't flatten injected fields
  49     return LayoutKind::REFERENCE;
  50   }
  51 
  52   if (field_info.access_flags().is_volatile()) {
  53     // volatile is used as a keyword to prevent flattening
  54     return LayoutKind::REFERENCE;
  55   }
  56 
  57   if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
   58     // the field's type is not a known value class, so a reference is used
  59     return LayoutKind::REFERENCE;
  60   }
  61 
  62   InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  63   InlineKlass* vk = inline_field_info->klass();
  64 
  65   if (field_info.field_flags().is_null_free_inline_type()) {
  66     assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
  67     if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
  68       if (vk->is_naturally_atomic() && vk->has_non_atomic_layout()) return LayoutKind::NON_ATOMIC_FLAT;
  69       return (vk->has_atomic_layout() && use_atomic_flat) ? LayoutKind::ATOMIC_FLAT : LayoutKind::REFERENCE;
  70     } else {
  71       return vk->has_non_atomic_layout() ? LayoutKind::NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
  72     }
  73   } else {
  74     if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
  75       return use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
  76     } else {
  77       return LayoutKind::REFERENCE;
  78     }
  79   }
  80 }
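
The decision order of field_layout_selection() above can be summarized with plain booleans standing in for the FieldInfo and InlineKlass queries; this is a simplified sketch (the naturally-atomic shortcut and AlwaysAtomicAccesses are deliberately omitted):

#include <cstdio>

enum class Layout { REFERENCE, NON_ATOMIC_FLAT, ATOMIC_FLAT, NULLABLE_ATOMIC_FLAT };

static Layout select(bool flattening_enabled, bool injected, bool is_volatile,
                     bool known_value_class, bool null_free, bool must_be_atomic,
                     bool has_non_atomic, bool has_atomic, bool has_nullable_atomic,
                     bool use_atomic_flat) {
  if (!flattening_enabled || injected || is_volatile || !known_value_class) {
    return Layout::REFERENCE;  // these cases are never flattened
  }
  if (null_free) {
    if (must_be_atomic) {
      return (has_atomic && use_atomic_flat) ? Layout::ATOMIC_FLAT : Layout::REFERENCE;
    }
    return has_non_atomic ? Layout::NON_ATOMIC_FLAT : Layout::REFERENCE;
  }
  return (has_nullable_atomic && use_atomic_flat) ? Layout::NULLABLE_ATOMIC_FLAT
                                                  : Layout::REFERENCE;
}

int main() {
  // A null-free field of a known value class that needs no atomicity: non-atomic flat layout.
  Layout lk = select(true, false, false, true, true, false, true, true, true, true);
  printf("%d\n", (int)lk);  // prints 1, i.e. NON_ATOMIC_FLAT
  return 0;
}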
  81 
  82 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
  83   switch(kind) {
  84     case LayoutKind::NON_ATOMIC_FLAT:
  85       *size = vk->non_atomic_size_in_bytes();
  86       *alignment = vk->non_atomic_alignment();
  87       break;
  88     case LayoutKind::ATOMIC_FLAT:
  89       *size = vk->atomic_size_in_bytes();
  90       *alignment = *size;
  91       break;
  92     case LayoutKind::NULLABLE_ATOMIC_FLAT:
  93       *size = vk->nullable_atomic_size_in_bytes();
  94       *alignment = *size;
  95     break;
  96     default:
  97       ShouldNotReachHere();
  98   }
  99 }
 100 
 101 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
 102   _next_block(nullptr),
 103   _prev_block(nullptr),
 104   _inline_klass(nullptr),
 105   _block_kind(kind),
 106   _layout_kind(LayoutKind::UNKNOWN),
 107   _offset(-1),
 108   _alignment(1),
 109   _size(size),
 110   _field_index(-1) {
 111   assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,

 112          "Otherwise, should use the constructor with a field index argument");
 113   assert(size > 0, "Sanity check");
 114 }
 115 
 116 
 117 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
 118  _next_block(nullptr),
 119  _prev_block(nullptr),
 120  _inline_klass(nullptr),
 121  _block_kind(kind),
 122  _layout_kind(LayoutKind::UNKNOWN),
 123  _offset(-1),
 124  _alignment(alignment),
 125  _size(size),
 126  _field_index(index) {
 127   assert(kind == REGULAR || kind == FLAT || kind == INHERITED,

  128          "Other kinds do not have a field index");
 129   assert(size > 0, "Sanity check");
 130   assert(alignment > 0, "Sanity check");
 131 }
 132 
 133 bool LayoutRawBlock::fit(int size, int alignment) {
 134   int adjustment = 0;
 135   if ((_offset % alignment) != 0) {
 136     adjustment = alignment - (_offset % alignment);
 137   }
 138   return _size >= size + adjustment;
 139 }
 140 
 141 FieldGroup::FieldGroup(int contended_group) :
 142   _next(nullptr),
 143   _small_primitive_fields(nullptr),
 144   _big_primitive_fields(nullptr),
 145   _oop_fields(nullptr),
 146   _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
 147   _oop_count(0) {}
 148 
 149 void FieldGroup::add_primitive_field(int idx, BasicType type) {
 150   int size = type2aelembytes(type);
 151   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
 152   if (size >= oopSize) {
 153     add_to_big_primitive_list(block);
 154   } else {
 155     add_to_small_primitive_list(block);
 156   }

 157 }
 158 
 159 void FieldGroup::add_oop_field(int idx) {
 160   int size = type2aelembytes(T_OBJECT);
 161   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
 162   if (_oop_fields == nullptr) {
 163     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 164   }
 165   _oop_fields->append(block);
 166   _oop_count++;
 167 }
 168 
 169 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
 170   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
 171   block->set_inline_klass(vk);
 172   block->set_layout_kind(lk);
 173   if (block->size() >= oopSize) {
 174     add_to_big_primitive_list(block);
 175   } else {
 176     add_to_small_primitive_list(block);
 177   }
 178 }
 179 
 180 void FieldGroup::sort_by_size() {
 181   if (_small_primitive_fields != nullptr) {
 182     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 183   }
 184   if (_big_primitive_fields != nullptr) {
 185     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 186   }
 187 }
 188 
 189 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 190   if (_small_primitive_fields == nullptr) {
 191     _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 192   }
 193   _small_primitive_fields->append(block);
 194 }
 195 
 196 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 197   if (_big_primitive_fields == nullptr) {
 198     _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 199   }
 200   _big_primitive_fields->append(block);
 201 }
 202 
 203 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
 204   _field_info(field_info),
 205   _inline_layout_info_array(inline_layout_info_array),
 206   _cp(cp),
 207   _blocks(nullptr),
 208   _start(_blocks),
 209   _last(_blocks),
 210   _super_first_field_offset(-1),
 211   _super_alignment(-1),
 212   _super_min_align_required(-1),
 213   _null_reset_value_offset(-1),
 214   _super_has_fields(false),
 215   _has_inherited_fields(false) {}
 216 
 217 void FieldLayout::initialize_static_layout() {
 218   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 219   _blocks->set_offset(0);
 220   _last = _blocks;
 221   _start = _blocks;
 222   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  223   // during bootstrapping, the size of java.lang.Class is still not known when the layout
  224   // of static fields is computed. Field offsets are fixed later when the size is known
 225   // (see java_lang_Class::fixup_mirror())
 226   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 227     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 228     _blocks->set_offset(0);
 229   }
 230 }
 231 
 232 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
 233   if (super_klass == nullptr) {
 234     super_ends_with_oop = false;
 235     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 236     _blocks->set_offset(0);
 237     _last = _blocks;
 238     _start = _blocks;
 239     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 240   } else {
 241     reconstruct_layout(super_klass, _super_has_fields, super_ends_with_oop);

 242     fill_holes(super_klass);
 243     if ((!super_klass->has_contended_annotations()) || !_super_has_fields) {
 244       _start = _blocks;  // start allocating fields from the first empty block
 245     } else {
 246       _start = _last;    // append fields at the end of the reconstructed layout
 247     }
 248   }
 249 }
 250 
 251 LayoutRawBlock* FieldLayout::first_field_block() {
 252   LayoutRawBlock* block = _blocks;
 253   while (block != nullptr
 254          && block->block_kind() != LayoutRawBlock::INHERITED
 255          && block->block_kind() != LayoutRawBlock::REGULAR
 256          && block->block_kind() != LayoutRawBlock::FLAT
 257          && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 258     block = block->next_block();
 259   }
 260   return block;
 261 }
 262 
 263 // Insert a set of fields into a layout.
 264 // For each field, search for an empty slot able to fit the field

  265 // (satisfying both size and alignment requirements); if none is found,
  266 // add the field at the end of the layout.
  267 // Fields cannot be inserted before the block specified in the "start" argument.
 268 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 269   if (list == nullptr) return;
 270   if (start == nullptr) start = this->_start;
 271   bool last_search_success = false;
 272   int last_size = 0;
 273   int last_alignment = 0;
 274   for (int i = 0; i < list->length(); i ++) {
 275     LayoutRawBlock* b = list->at(i);
 276     LayoutRawBlock* cursor = nullptr;
 277     LayoutRawBlock* candidate = nullptr;

 278     // if start is the last block, just append the field
 279     if (start == last_block()) {
 280       candidate = last_block();
 281     }
 282     // Before iterating over the layout to find an empty slot fitting the field's requirements,
 283     // check if the previous field had the same requirements and if the search for a fitting slot
 284     // was successful. If the requirements were the same but the search failed, a new search will
  285     // fail the same way, so just append the field at the end of the layout.
 286     else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
 287       candidate = last_block();
 288     } else {
 289       // Iterate over the layout to find an empty slot fitting the field's requirements
 290       last_size = b->size();
 291       last_alignment = b->alignment();
 292       cursor = last_block()->prev_block();
 293       assert(cursor != nullptr, "Sanity check");
 294       last_search_success = true;
 295 
 296       while (cursor != start) {
 297         if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
 298           if (candidate == nullptr || cursor->size() < candidate->size()) {
 299             candidate = cursor;
 300           }
 301         }
 302         cursor = cursor->prev_block();
 303       }
 304       if (candidate == nullptr) {
 305         candidate = last_block();
 306         last_search_success = false;
 307       }
 308       assert(candidate != nullptr, "Candidate must not be null");
 309       assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 310       assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
 311     }

 312     insert_field_block(candidate, b);
 313   }
 314 }
 315 
  316 // Used for classes with hard-coded field offsets: insert a field at the specified offset.
 317 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
 318   assert(block != nullptr, "Sanity check");
 319   block->set_offset(offset);
 320   if (start == nullptr) {
 321     start = this->_start;
 322   }
 323   LayoutRawBlock* slot = start;
 324   while (slot != nullptr) {
 325     if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
 326         slot == _last){
 327       assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
 328       assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
 329       if (slot->offset() < block->offset()) {
 330         int adjustment = block->offset() - slot->offset();
 331         LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 332         insert(slot, adj);
 333       }
 334       insert(slot, block);
 335       if (slot->size() == 0) {
 336         remove(slot);
 337       }
 338       if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
 339         _field_info->adr_at(block->field_index())->set_offset(block->offset());
 340       }
 341       return;
 342     }
 343     slot = slot->next_block();
 344   }
 345   fatal("Should have found a matching slot above, corrupted layout or invalid offset");
 346 }
 347 
 348 // The allocation logic uses a best fit strategy: the set of fields is allocated
  349 // in the first empty slot big enough to contain the whole set (including padding
 350 // to fit alignment constraints).
 351 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 352   if (list == nullptr) return;
 353   if (start == nullptr) {
 354     start = _start;
 355   }
 356   // This code assumes that if the first block is well aligned, the following
 357   // blocks would naturally be well aligned (no need for adjustment)
 358   int size = 0;
 359   for (int i = 0; i < list->length(); i++) {
 360     size += list->at(i)->size();
 361   }
 362 
 363   LayoutRawBlock* candidate = nullptr;
 364   if (start == last_block()) {
 365     candidate = last_block();
 366   } else {
 367     LayoutRawBlock* first = list->at(0);
 368     candidate = last_block()->prev_block();
 369     while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
 370       if (candidate == start) {
 371         candidate = last_block();
 372         break;
 373       }
 374       candidate = candidate->prev_block();
 375     }
 376     assert(candidate != nullptr, "Candidate must not be null");
 377     assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 378     assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
 379   }
 380 
 381   for (int i = 0; i < list->length(); i++) {
 382     LayoutRawBlock* b = list->at(i);
 383     insert_field_block(candidate, b);
 384     assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
 385   }
 386 }
 387 
 388 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 389   assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 390   if (slot->offset() % block->alignment() != 0) {
 391     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 392     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 393     insert(slot, adj);
 394   }
  395   assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
 396   insert(slot, block);
 397   if (slot->size() == 0) {
 398     remove(slot);
 399   }
 400   // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
 401   if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 402     _field_info->adr_at(block->field_index())->set_offset(block->offset());
 403     if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
 404       _null_reset_value_offset = block->offset();
 405     }
 406   }
 407   if (block->block_kind() == LayoutRawBlock::FLAT && block->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
 408     int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
 409     _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 410     _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 411   }
 412 
 413   return block;
 414 }
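
The null-marker offset computed above is just a translation of the marker's position from the inline klass's own layout into the holder's layout; a worked example with made-up numbers:

#include <cstdio>

int main() {
  int null_marker_offset_in_vk = 28;  // where the marker sits inside the value class (assumed)
  int payload_offset_in_vk     = 16;  // where the payload starts inside the value class (assumed)
  int field_offset_in_holder   = 40;  // where the flat field was placed in the holder (assumed)
  int nm_offset = null_marker_offset_in_vk - payload_offset_in_vk + field_offset_in_holder;
  printf("null marker lands at offset %d in the holder\n", nm_offset);  // 52
  return 0;
}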
 415 
 416 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
 417   has_instance_fields = ends_with_oop = false;
 418   if (ik->is_abstract() && !ik->is_identity_class()) {
 419     _super_alignment = type2aelembytes(BasicType::T_LONG);
 420   }
 421   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 422   BasicType last_type;
 423   int last_offset = -1;
 424   while (ik != nullptr) {
 425     for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 426       BasicType type = Signature::basic_type(fs.signature());
 427       // distinction between static and non-static fields is missing
 428       if (fs.access_flags().is_static()) continue;
 429       has_instance_fields = true;
 430       _has_inherited_fields = true;
 431       if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
 432         _super_first_field_offset = fs.offset();
 433       }
 434       LayoutRawBlock* block;
 435       if (fs.is_flat()) {
 436         InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
 437         InlineKlass* vk = layout_info.klass();
 438         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
 439                                    vk->layout_size_in_bytes(layout_info.kind()),
 440                                    vk->layout_alignment(layout_info.kind()));
 441         assert(_super_alignment == -1 || _super_alignment >=  vk->payload_alignment(), "Invalid value alignment");
 442         _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
 443       } else {
 444         int size = type2aelembytes(type);
 445         // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 446         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
 447         // For primitive types, the alignment is equal to the size
 448         assert(_super_alignment == -1 || _super_alignment >=  size, "Invalid value alignment");
 449         _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
 450       }
 451       if (fs.offset() > last_offset) {
 452         last_offset = fs.offset();
 453         last_type = type;
 454       }



 455       block->set_offset(fs.offset());
 456       all_fields->append(block);
 457     }
 458     ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
 459   }
 460   assert(last_offset == -1 || last_offset > 0, "Sanity");
 461   if (last_offset > 0 &&
 462       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 463     ends_with_oop = true;
 464   }
 465 
 466   all_fields->sort(LayoutRawBlock::compare_offset);
 467   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 468   _blocks->set_offset(0);
 469   _last = _blocks;

 470   for(int i = 0; i < all_fields->length(); i++) {
 471     LayoutRawBlock* b = all_fields->at(i);
 472     _last->set_next_block(b);
 473     b->set_prev_block(_last);
 474     _last = b;
 475   }
 476   _start = _blocks;
 477 }
 478 
 479 // Called during the reconstruction of a layout, after fields from super
 480 // classes have been inserted. It fills unused slots between inserted fields
 481 // with EMPTY blocks, so the regular field insertion methods would work.
 482 // This method handles classes with @Contended annotations differently
  483 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
  484 // fields from interfering with contended fields/classes.
 485 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
 486   assert(_blocks != nullptr, "Sanity check");
 487   assert(_blocks->offset() == 0, "first block must be at offset zero");
 488   LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
 489   LayoutRawBlock* b = _blocks;
 490   while (b->next_block() != nullptr) {
 491     if (b->next_block()->offset() > (b->offset() + b->size())) {
 492       int size = b->next_block()->offset() - (b->offset() + b->size());
  493       // FIXME it would be better if the initial empty block were tagged as PADDING for value classes
 494       LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
 495       empty->set_offset(b->offset() + b->size());
 496       empty->set_next_block(b->next_block());
 497       b->next_block()->set_prev_block(empty);
 498       b->set_next_block(empty);
 499       empty->set_prev_block(b);
 500     }
 501     b = b->next_block();
 502   }
 503   assert(b->next_block() == nullptr, "Invariant at this point");
 504   assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");

 505   // If the super class has @Contended annotation, a padding block is
 506   // inserted at the end to ensure that fields from the subclasses won't share
 507   // the cache line of the last field of the contended class
 508   if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
 509     LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 510     p->set_offset(b->offset() + b->size());
 511     b->set_next_block(p);
 512     p->set_prev_block(b);
 513     b = p;
 514   }
 515 
 516   LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 517   last->set_offset(b->offset() + b->size());
 518   assert(last->offset() > 0, "Sanity check");
 519   b->set_next_block(last);
 520   last->set_prev_block(b);
 521   _last = last;
 522 }
 523 
 524 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
 525   assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 526   assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
 527   block->set_offset(slot->offset());
 528   slot->set_offset(slot->offset() + block->size());
 529   assert((slot->size() - block->size()) < slot->size(), "underflow checking");
 530   assert(slot->size() - block->size() >= 0, "no negative size allowed");
 531   slot->set_size(slot->size() - block->size());
 532   block->set_prev_block(slot->prev_block());
 533   block->set_next_block(slot);
 534   slot->set_prev_block(block);
 535   if (block->prev_block() != nullptr) {
 536     block->prev_block()->set_next_block(block);
 537   }
 538   if (_blocks == slot) {
 539     _blocks = block;
 540   }
 541   if (_start == slot) {
 542     _start = block;
 543   }
 544   return block;
 545 }
 546 
 547 void FieldLayout::remove(LayoutRawBlock* block) {
 548   assert(block != nullptr, "Sanity check");
 549   assert(block != _last, "Sanity check");
 550   if (_blocks == block) {
 551     _blocks = block->next_block();
 552     if (_blocks != nullptr) {
 553       _blocks->set_prev_block(nullptr);
 554     }
 555   } else {
 556     assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
 557     block->prev_block()->set_next_block(block->next_block());
 558     block->next_block()->set_prev_block(block->prev_block());
 559   }
 560   if (block == _start) {
 561     _start = block->prev_block();
 562   }
 563 }
 564 
 565 void FieldLayout::shift_fields(int shift) {
 566   LayoutRawBlock* b = first_field_block();
 567   LayoutRawBlock* previous = b->prev_block();
 568   if (previous->block_kind() == LayoutRawBlock::EMPTY) {
 569     previous->set_size(previous->size() + shift);
 570   } else {
 571     LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
 572     nb->set_offset(b->offset());
 573     previous->set_next_block(nb);
 574     nb->set_prev_block(previous);
 575     b->set_prev_block(nb);
 576     nb->set_next_block(b);
 577   }
 578   while (b != nullptr) {
 579     b->set_offset(b->offset() + shift);
 580     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
 581       _field_info->adr_at(b->field_index())->set_offset(b->offset());
 582       if (b->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
 583         int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
 584         _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
 585         _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
 586 
 587       }
 588     }
 589     assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
 590     b = b->next_block();
 591   }
 592 }
 593 
 594 LayoutRawBlock* FieldLayout::find_null_marker() {
 595   LayoutRawBlock* b = _blocks;
 596   while (b != nullptr) {
 597     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 598       return b;
 599     }
 600     b = b->next_block();
 601   }
 602   ShouldNotReachHere();
 603 }
 604 
 605 void FieldLayout::remove_null_marker() {
 606   LayoutRawBlock* b = first_field_block();
 607   while (b != nullptr) {
 608     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 609       if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
 610         LayoutRawBlock* n = b->next_block();
 611         remove(b);
 612         n->set_offset(b->offset());
 613         n->set_size(n->size() + b->size());
 614       } else {
 615         b->set_block_kind(LayoutRawBlock::EMPTY);
 616       }
 617       return;
 618     }
 619     b = b->next_block();
 620   }
 621   ShouldNotReachHere(); // if we reach this point, the null marker was not found!
 622 }
 623 
 624 static const char* layout_kind_to_string(LayoutKind lk) {
 625   switch(lk) {
 626     case LayoutKind::REFERENCE:
 627       return "REFERENCE";
 628     case LayoutKind::NON_ATOMIC_FLAT:
 629       return "NON_ATOMIC_FLAT";
 630     case LayoutKind::ATOMIC_FLAT:
 631       return "ATOMIC_FLAT";
 632     case LayoutKind::NULLABLE_ATOMIC_FLAT:
 633       return "NULLABLE_ATOMIC_FLAT";
 634     case LayoutKind::UNKNOWN:
 635       return "UNKNOWN";
 636     default:
 637       ShouldNotReachHere();
 638   }
 639 }
 640 
 641 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields) {
 642   ResourceMark rm;
 643   LayoutRawBlock* b = _blocks;
 644   while(b != _last) {
 645     switch(b->block_kind()) {
 646       case LayoutRawBlock::REGULAR: {
 647         FieldInfo* fi = _field_info->adr_at(b->field_index());
 648         output->print_cr(" @%d %s %d/%d \"%s\" %s",
 649                          b->offset(),
 650                          "REGULAR",

 651                          b->size(),
 652                          b->alignment(),
 653                          fi->name(_cp)->as_C_string(),
 654                          fi->signature(_cp)->as_C_string());
 655         break;
 656       }
 657       case LayoutRawBlock::FLAT: {
 658         FieldInfo* fi = _field_info->adr_at(b->field_index());
 659         InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
 660         assert(ik != nullptr, "");
 661         output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
 662                          b->offset(),
 663                          "FLAT",

 664                          b->size(),
 665                          b->alignment(),
 666                          fi->name(_cp)->as_C_string(),
 667                          fi->signature(_cp)->as_C_string(),
 668                          ik->name()->as_C_string(),
 669                          ik->class_loader_data(), layout_kind_to_string(b->layout_kind()));
 670         break;
 671       }
 672       case LayoutRawBlock::RESERVED: {
 673         output->print_cr(" @%d %s %d/-",
 674                          b->offset(),
 675                          "RESERVED",
 676                          b->size());
 677         break;
 678       }
 679       case LayoutRawBlock::INHERITED: {
 680         assert(!is_static, "Static fields are not inherited in layouts");
 681         assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
 682         bool found = false;
 683         const InstanceKlass* ik = super;
 684         while (!found && ik != nullptr) {
 685           for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
 686             if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
 687               output->print_cr(" @%d %s %d/%d \"%s\" %s",
 688                   b->offset(),
 689                   "INHERITED",

 690                   b->size(),
 691                   b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
 692                   fs.name()->as_C_string(),
 693                   fs.signature()->as_C_string());
 694               found = true;
 695               break;
 696             }


 697         }
 698         ik = ik->java_super();
 699       }
 700       break;
 701     }
 702     case LayoutRawBlock::EMPTY:
 703       output->print_cr(" @%d %s %d/1",
 704                        b->offset(),
 705                       "EMPTY",
 706                        b->size());
 707       break;
 708     case LayoutRawBlock::PADDING:
 709       output->print_cr(" @%d %s %d/1",
 710                       b->offset(),
 711                       "PADDING",
 712                       b->size());
 713       break;
 714     case LayoutRawBlock::NULL_MARKER:
 715     {
 716       output->print_cr(" @%d %s %d/1 ",
 717                       b->offset(),
 718                       "NULL_MARKER",
 719                       b->size());
 720       break;
 721     }
 722     default:
 723       fatal("Unknown block type");
 724     }
 725     b = b->next_block();
 726   }
 727 }
 728 
 729 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
 730                                        GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type, bool is_abstract_value,
 731                                        bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
 732   _classname(classname),
 733   _loader_data(loader_data),
 734   _super_klass(super_klass),
 735   _constant_pool(constant_pool),
 736   _field_info(field_info),
 737   _info(info),
 738   _inline_layout_info_array(inline_layout_info_array),
 739   _root_group(nullptr),
 740   _contended_groups(GrowableArray<FieldGroup*>(8)),
 741   _static_fields(nullptr),
 742   _layout(nullptr),
 743   _static_layout(nullptr),
 744   _nonstatic_oopmap_count(0),
 745   _payload_alignment(-1),
 746   _payload_offset(-1),
 747   _null_marker_offset(-1),
 748   _payload_size_in_bytes(-1),
 749   _non_atomic_layout_size_in_bytes(-1),
 750   _non_atomic_layout_alignment(-1),
 751   _atomic_layout_size_in_bytes(-1),
 752   _nullable_layout_size_in_bytes(-1),
 753   _fields_size_sum(0),
 754   _declared_non_static_fields_count(0),
 755   _has_non_naturally_atomic_fields(false),
 756   _is_naturally_atomic(false),
 757   _must_be_atomic(must_be_atomic),
 758   _has_nonstatic_fields(false),
 759   _has_inline_type_fields(false),
 760   _is_contended(is_contended),
 761   _is_inline_type(is_inline_type),
 762   _is_abstract_value(is_abstract_value),
 763   _has_flattening_information(is_inline_type),
 764   _is_empty_inline_class(false) {}
 765 
 766 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 767   assert(g > 0, "must only be called for named contended groups");
 768   FieldGroup* fg = nullptr;
 769   for (int i = 0; i < _contended_groups.length(); i++) {
 770     fg = _contended_groups.at(i);
 771     if (fg->contended_group() == g) return fg;
 772   }
 773   fg = new FieldGroup(g);
 774   _contended_groups.append(fg);
 775   return fg;
 776 }
 777 
 778 void FieldLayoutBuilder::prologue() {
 779   _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 780   const InstanceKlass* super_klass = _super_klass;
 781   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
 782   _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
 783   if (super_klass != nullptr) {
 784     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 785   }
 786   _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 787   _static_layout->initialize_static_layout();
 788   _static_fields = new FieldGroup();
 789   _root_group = new FieldGroup();
 790 }
 791 
 792 // Field sorting for regular (non-inline) classes:
 793 //   - fields are sorted into static and non-static fields
 794 //   - non-static fields are also sorted according to their contention group
 795 //     (in support of the @Contended annotation)
 796 //   - the @Contended annotation is ignored for static fields
 797 //   - field flattening decisions are made in this method
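     // For illustration only: a hypothetical class declaring
     //   static int counter; long l; byte b; @Contended("hot") int x; Object o;
     // would place counter in _static_fields, l, b and o in _root_group
     // (as primitive and oop blocks respectively), and x in the contended
     // group named "hot"; each group is then sorted by decreasing field size.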
 798 void FieldLayoutBuilder::regular_field_sorting() {
 799   int idx = 0;
 800   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {

 801     FieldGroup* group = nullptr;
 802     FieldInfo fieldinfo = *it;
 803     if (fieldinfo.access_flags().is_static()) {
 804       group = _static_fields;
 805     } else {
 806       _has_nonstatic_fields = true;
 807       if (fieldinfo.field_flags().is_contended()) {
 808         int g = fieldinfo.contended_group();
 809         if (g == 0) {
 810           group = new FieldGroup(true);
 811           _contended_groups.append(group);
 812         } else {
 813           group = get_or_create_contended_group(g);
 814         }
 815       } else {
 816         group = _root_group;
 817       }
 818     }
 819     assert(group != nullptr, "invariant");
 820     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 821     switch(type) {
 822     case T_BYTE:
 823     case T_CHAR:
 824     case T_DOUBLE:
 825     case T_FLOAT:
 826     case T_INT:
 827     case T_LONG:
 828     case T_SHORT:
 829     case T_BOOLEAN:
 830       group->add_primitive_field(idx, type);
 831       break;
 832     case T_OBJECT:
 833     case T_ARRAY:
 834     {
 835       LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);
 836       if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
 837           || (!fieldinfo.field_flags().is_injected()
 838               && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
 839               && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
 840         _has_inline_type_fields = true;
 841         _has_flattening_information = true;
 842       }
 843       if (lk == LayoutKind::REFERENCE) {
 844         if (group != _static_fields) _nonstatic_oopmap_count++;
 845         group->add_oop_field(idx);
 846       } else {
 847         _has_flattening_information = true;
 848         InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
 849         int size, alignment;
 850         get_size_and_alignment(vk, lk, &size, &alignment);
 851         group->add_flat_field(idx, vk, lk, size, alignment);
 852         _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
 853         _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 854         _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
 855         _field_info->adr_at(idx)->set_layout_kind(lk);
 856         // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
 857       }
 858       break;
 859     }
 860     default:
 861       fatal("Something wrong?");
 862     }
 863   }
 864   _root_group->sort_by_size();
 865   _static_fields->sort_by_size();
 866   if (!_contended_groups.is_empty()) {
 867     for (int i = 0; i < _contended_groups.length(); i++) {
 868       _contended_groups.at(i)->sort_by_size();
 869     }
 870   }
 871 }
 872 
 873 /* Field sorting for inline classes:
 874  *   - because inline classes are immutable, the @Contended annotation is ignored
 875  *     when computing their layout (with only read operations, there's no false
 876  *     sharing issue)
 877  *   - this method also records the alignment of the field with the most
 878  *     constraining alignment; this value is then used as the alignment
 879  *     constraint when flattening this inline type into another container
 880  *   - field flattening decisions are made in this method (those decisions are
 881  *     currently based only on the size of the fields to be flattened; the size
 882  *     of the resulting instance is not considered)
 883  */
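     // For illustration only: a hypothetical value class declaring
     //   byte b; int i; long l;
     // ends up with _payload_alignment == 8, the alignment of its most
     // constraining field (the long), so that this payload stays correctly
     // aligned when it is later flattened into another container.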
 884 void FieldLayoutBuilder::inline_class_field_sorting() {
 885   assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
 886   int alignment = -1;
 887   int idx = 0;
 888   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
 889     FieldGroup* group = nullptr;
 890     FieldInfo fieldinfo = *it;
 891     int field_alignment = 1;
 892     if (fieldinfo.access_flags().is_static()) {
 893       group = _static_fields;
 894     } else {
 895       _has_nonstatic_fields = true;
 896       _declared_non_static_fields_count++;
 897       group = _root_group;
 898     }
 899     assert(group != nullptr, "invariant");
 900     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 901     switch(type) {
 902     case T_BYTE:
 903     case T_CHAR:
 904     case T_DOUBLE:
 905     case T_FLOAT:
 906     case T_INT:
 907     case T_LONG:
 908     case T_SHORT:
 909     case T_BOOLEAN:
 910       if (group != _static_fields) {
 911         field_alignment = type2aelembytes(type); // alignment == size for primitive types
 912       }
 913       group->add_primitive_field(fieldinfo.index(), type);
 914       break;
 915     case T_OBJECT:
 916     case T_ARRAY:
 917     {
 918       bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
 919       LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);
 920       if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
 921           || (!fieldinfo.field_flags().is_injected()
 922               && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
 923               && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
 924         _has_inline_type_fields = true;
 925         _has_flattening_information = true;
 926       }
 927       if (lk == LayoutKind::REFERENCE) {
 928         if (group != _static_fields) {
 929           _nonstatic_oopmap_count++;
 930           field_alignment = type2aelembytes(type); // alignment == size for oops
 931         }
 932         group->add_oop_field(idx);
 933       } else {
 934         _has_flattening_information = true;
 935         InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
 936         if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
 937         int size, alignment;
 938         get_size_and_alignment(vk, lk, &size, &alignment);
 939         group->add_flat_field(idx, vk, lk, size, alignment);
 940         _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
 941         _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 942         field_alignment = alignment;
 943         _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
 944         _field_info->adr_at(idx)->set_layout_kind(lk);
 945       }
 946       break;
 947     }
 948     default:
 949       fatal("Unexpected BasicType");
 950     }
 951     if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
 952   }
 953   _payload_alignment = alignment;
 954   assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
 955 }
 956 
 957 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 958   if (ContendedPaddingWidth > 0) {
 959     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 960     _layout->insert(slot, padding);
 961   }
 962 }
 963 
 964 // The layout computation for regular classes is an evolution of the previous default layout
 965 // (FieldAllocationStyle 1):
 966 //   - primitive fields (both primitive types and flat inline types) are allocated
 967 //     first (from the biggest to the smallest)
 968 //   - oop fields are allocated, either in existing gaps or at the end of
 969 //     the layout. We allocate oops in a single block to have a single oop map entry.
 970 //   - if the super class ends with an oop, we lead with oops. That will cause the
 971 //     trailing oop map entry of the super class and the oop map entry of this class
 972 //     to be folded into a single entry later. Correspondingly, if the super class
 973 //     ends with a primitive field, we gain nothing by leading with oops; therefore
 974 //     we let oop fields trail, thus giving future derived classes the chance to apply
 975 //     the same trick.
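     // For illustration only (hypothetical class, super class ending with a
     // primitive field): for declared fields
     //   byte b; long l; Object o;
     // the layout allocates l first, then b (possibly filling an earlier gap),
     // and places o last so that a future subclass can append its own oops
     // right after it and share a single oop map entry.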
 976 void FieldLayoutBuilder::compute_regular_layout() {
 977   bool need_tail_padding = false;
 978   prologue();
 979   regular_field_sorting();

 980   if (_is_contended) {
 981     _layout->set_start(_layout->last_block());
 982     // insertion is currently easy because the current strategy doesn't try to fill holes
 983     // in super classes' layouts => the _start block is consequently the _last_block
 984     insert_contended_padding(_layout->start());
 985     need_tail_padding = true;
 986   }
 987 
 988   if (_super_ends_with_oop) {
 989     _layout->add(_root_group->oop_fields());
 990     _layout->add(_root_group->big_primitive_fields());
 991     _layout->add(_root_group->small_primitive_fields());
 992   } else {
 993     _layout->add(_root_group->big_primitive_fields());
 994     _layout->add(_root_group->small_primitive_fields());
 995     _layout->add(_root_group->oop_fields());
 996   }
 997 
 998   if (!_contended_groups.is_empty()) {
 999     for (int i = 0; i < _contended_groups.length(); i++) {
1000       FieldGroup* cg = _contended_groups.at(i);
1001       LayoutRawBlock* start = _layout->last_block();
1002       insert_contended_padding(start);
1003       _layout->add(cg->big_primitive_fields());
1004       _layout->add(cg->small_primitive_fields(), start);
1005       _layout->add(cg->oop_fields(), start);
1006       need_tail_padding = true;
1007     }
1008   }
1009 
1010   if (need_tail_padding) {
1011     insert_contended_padding(_layout->last_block());
1012   }
1013 
1014   // Warning: InstanceMirrorKlass expects static oops to be allocated first
1015   _static_layout->add_contiguously(_static_fields->oop_fields());
1016   _static_layout->add(_static_fields->big_primitive_fields());
1017   _static_layout->add(_static_fields->small_primitive_fields());
1018 
1019   epilogue();
1020 }
1021 
1022 /* The layout computation for inline classes uses a slightly different strategy than
1023  * the one for regular classes. Regular classes have their oop fields allocated at the end
1024  * of the layout to improve GC performance. Unfortunately, this strategy
1025  * increases the number of empty slots inside an instance. Because the purpose
1026  * of inline classes is to be embedded into other containers, it is critical
1027  * to keep their size as small as possible. For this reason, the allocation
1028  * strategy is:
1029  *   - big primitive fields (primitive types and flat inline types smaller
1030  *     than an oop) are allocated first (from the biggest to the smallest)
1031  *   - then oop fields
1032  *   - then small primitive fields (from the biggest to the smallest)
1033  */
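     // For illustration only (assuming 4-byte compressed oops): a hypothetical
     // value class declaring
     //   long l; Object o; short s;
     // is laid out as l, then o, then s, keeping the payload dense so that it
     // remains cheap to embed into other containers.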
1034 void FieldLayoutBuilder::compute_inline_class_layout() {
1035 
1036   // Test if the concrete inline class is an empty class (no instance fields)
1037   // and insert a dummy field if needed
1038   if (!_is_abstract_value) {
1039     bool declares_non_static_fields = false;
1040     for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
1041       FieldInfo fieldinfo = *it;
1042       if (!fieldinfo.access_flags().is_static()) {
1043         declares_non_static_fields = true;
1044         break;
1045       }
1046     }
1047     if (!declares_non_static_fields) {
1048       bool has_inherited_fields = false;
1049       const InstanceKlass* super = _super_klass;
1050       while(super != nullptr) {
1051         if (super->has_nonstatic_fields()) {
1052           has_inherited_fields = true;
1053           break;
1054         }
1055         super = super->super() == nullptr ? nullptr : InstanceKlass::cast(super->super());
1056       }
1057 
1058       if (!has_inherited_fields) {
1059         // Inject ".empty" dummy field
1060         _is_empty_inline_class = true;
1061         FieldInfo::FieldFlags fflags(0);
1062         fflags.update_injected(true);
1063         AccessFlags aflags;
1064         FieldInfo fi(aflags,
1065                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
1066                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
1067                     0,
1068                     fflags);
1069         int idx = _field_info->append(fi);
1070         _field_info->adr_at(idx)->set_index(idx);
1071       }
1072     }
1073   }
1074 
1075   prologue();
1076   inline_class_field_sorting();
1077 
1078   assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");
1079 
1080   if (_layout->super_has_fields() && !_is_abstract_value) {  // non-static field layout
1081     if (!_has_nonstatic_fields) {
1082       assert(_is_abstract_value, "Concrete value types have at least one field");
1083       // Nothing to do
1084     } else {
1085       // decide which alignment to use, then set first allowed field offset
1086 
1087       assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
1088       assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");
1089 
1090       if (_payload_alignment < _layout->super_alignment()) {
1091         int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
1092         assert(new_alignment % _payload_alignment == 0, "Must be");
1093         assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
1094         _payload_alignment = new_alignment;
1095       }
1096       _layout->set_start(_layout->first_field_block());
1097     }
1098   } else {
1099     if (_is_abstract_value && _has_nonstatic_fields) {
1100       _payload_alignment = type2aelembytes(BasicType::T_LONG);
1101     }
1102     assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
1103     LayoutRawBlock* first_empty = _layout->start()->next_block();
1104     if (first_empty->offset() % _payload_alignment != 0) {
1105       LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
1106       _layout->insert(first_empty, padding);
1107       if (first_empty->size() == 0) {
1108         _layout->remove(first_empty);
1109       }
1110       _layout->set_start(padding);
1111     }
1112   }
1113 
1114   _layout->add(_root_group->big_primitive_fields());
1115   _layout->add(_root_group->oop_fields());
1116   _layout->add(_root_group->small_primitive_fields());
1117 
1118   LayoutRawBlock* first_field = _layout->first_field_block();
1119   if (first_field != nullptr) {
1120     _payload_offset = _layout->first_field_block()->offset();
1121     _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1122   } else {
1123     assert(_is_abstract_value, "Concrete inline types must have at least one field");
1124     _payload_offset = _layout->blocks()->size();
1125     _payload_size_in_bytes = 0;
1126   }
1127 
1128   // Determining if the value class is naturally atomic:
1129   if ((!_layout->super_has_fields() && _declared_non_static_fields_count <= 1 && !_has_non_naturally_atomic_fields)
1130       || (_layout->super_has_fields() && _super_klass->is_naturally_atomic() && _declared_non_static_fields_count == 0)) {
1131         _is_naturally_atomic = true;
1132   }
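       // E.g. a value class declaring a single int field and inheriting no fields
       // is naturally atomic: its whole payload can be read or written with one
       // plain memory access (illustrative example).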
1133 
1134   // At this point, the characteristics of the raw layout (used in standalone instances) are known.
1135   // From these, additional layouts will be computed: the atomic and nullable layouts.
1136   // Once those additional layouts are computed, the raw layout might need some adjustments.
1137 
1138   bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;
1139 
1140   if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
1141     // Validation of the non-atomic layout
1142     if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
1143       _non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
1144       _non_atomic_layout_alignment = _payload_alignment;
1145     }
1146 
1147     // The next step is to compute the characteristics of a layout enabling atomic updates
1148     if (UseAtomicValueFlattening) {
1149       int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
1150       if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
1151         _atomic_layout_size_in_bytes = atomic_size;
1152       }
1153     }
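         // For instance, a 5-byte payload rounds up to an 8-byte atomic layout,
         // while a 17-byte payload would round up to 32 bytes and be rejected
         // wherever that exceeds MAX_ATOMIC_OP_SIZE (illustrative sizes only).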
1154 
1155     // The next step is the nullable layout: the layout must include a null marker and must also be atomic
1156     if (UseNullableValueFlattening) {
1157       // Check whether there's an empty slot inside the layout that could be used to store the null marker
1158       // FIXME: could it be possible to re-use the .empty field as a null marker for empty values?
1159       LayoutRawBlock* b = _layout->first_field_block();
1160       assert(b != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1161       int null_marker_offset = -1;
1162       if (_is_empty_inline_class) {
1163         // Reusing the dummy field as the null marker
1164         assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
1165         null_marker_offset = b->offset();
1166       } else {
1167         while (b != _layout->last_block()) {
1168           if (b->block_kind() == LayoutRawBlock::EMPTY) {
1169             break;
1170           }
1171           b = b->next_block();
1172         }
1173         if (b != _layout->last_block()) {
1174           // found an empty slot, register its offset from the beginning of the payload
1175           null_marker_offset = b->offset();
1176           LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1177           _layout->add_field_at_offset(marker, b->offset());
1178         }
1179         if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
1180           int last_offset = _layout->last_block()->offset();
1181           LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1182           _layout->insert_field_block(_layout->last_block(), marker);
1183           assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
1184           null_marker_offset = marker->offset();
1185         }
1186       }
1187 
1188       // Now that the null marker is there, the size of the nullable layout must be computed (remember, it must be atomic too)
1189       int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1190       int nullable_size = round_up_power_of_2(new_raw_size);
1191       if (nullable_size <= (int)MAX_ATOMIC_OP_SIZE) {
1192         _nullable_layout_size_in_bytes = nullable_size;
1193         _null_marker_offset = null_marker_offset;
1194       } else {
1195         // If the nullable layout is rejected, the NULL_MARKER block should be removed
1196         // from the layout, otherwise it will still appear when the layout is printed
1197         if (!_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
1198           _layout->remove_null_marker();
1199         }
1200         _null_marker_offset = -1;
1201       }
1202     }
1203     // If the inline class has an atomic or nullable (which is also atomic) layout,
1204     // we want the raw layout to have the same alignment as those atomic layouts so access code
1205     // can remain simple (a single instruction without an intermediate copy). This might require
1206     // shifting all fields in the raw layout, but this operation is possible only if the class
1207     // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
1208     // field shift is needed but not possible, all atomic layouts are disabled and only the
1209     // reference and loosely consistent (non-atomic) layouts are supported.
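         // Example (hypothetical offsets): a payload starting at offset 12 that
         // needs an 8-byte atomic layout requires 8-byte alignment; shifting the
         // fields by 12 % 8 == 4 bytes moves the payload to offset 16, which is
         // only possible when no inherited fields pin their offsets.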
1210     int required_alignment = _payload_alignment;
1211     if (has_atomic_layout() && _payload_alignment < atomic_layout_size_in_bytes()) {
1212       required_alignment = atomic_layout_size_in_bytes();
1213     }
1214     if (has_nullable_atomic_layout() && _payload_alignment < nullable_layout_size_in_bytes()) {
1215       required_alignment = nullable_layout_size_in_bytes();
1216     }
1217     int shift = first_field->offset() % required_alignment;
1218     if (shift != 0) {
1219       if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
1220         assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1221         _layout->shift_fields(shift);
1222         _payload_offset = _layout->first_field_block()->offset();
1223         if (has_nullable_atomic_layout()) {
1224           assert(!_is_empty_inline_class, "Should not get here with empty values");
1225           _null_marker_offset = _layout->find_null_marker()->offset();
1226         }
1227         _payload_alignment = required_alignment;
1228       } else {
1229         _atomic_layout_size_in_bytes = -1;
1230         if (has_nullable_atomic_layout() && !_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
1231           _layout->remove_null_marker();
1232         }
1233         _nullable_layout_size_in_bytes = -1;
1234         _null_marker_offset = -1;
1235       }
1236     } else {
1237       _payload_alignment = required_alignment;
1238     }
1239 
1240     // If the inline class has a nullable layout, the layout used in heap-allocated standalone
1241     // instances must also be the nullable layout, in order to be able to set the null marker to
1242     // non-null before copying the payload to other containers.
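         // For instance (hypothetical sizes): with a 6-byte raw payload and an
         // 8-byte nullable layout, standalone heap instances reserve the full
         // 8 bytes so the null marker is always present and can be set to
         // non-null before the payload is copied into another container.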
1243     if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_layout_size_in_bytes()) {
1244       _payload_size_in_bytes = nullable_layout_size_in_bytes();
1245     }
1246   }
1247   // Warning: InstanceMirrorKlass expects static oops to be allocated first
1248   _static_layout->add_contiguously(_static_fields->oop_fields());
1249   _static_layout->add(_static_fields->big_primitive_fields());
1250   _static_layout->add(_static_fields->small_primitive_fields());
1251 
1252   epilogue();
1253 }
1254 
1255 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1256                 InlineKlass* vklass, int offset) {
1257   int diff = offset - vklass->payload_offset();
1258   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1259   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1260   while (map < last_map) {
1261     nonstatic_oop_maps->add(map->offset() + diff, map->count());
1262     map++;
1263   }
1264 }
1265 
1266 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1267   if (list == nullptr) return;
1268   for (int i = 0; i < list->length(); i++) {
1269     LayoutRawBlock* f = list->at(i);
1270     if (f->block_kind() == LayoutRawBlock::FLAT) {
1271       InlineKlass* vk = f->inline_klass();
1272       assert(vk != nullptr, "Should have been initialized");
1273       if (vk->contains_oops()) {
1274         add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1275       }
1276     }
1277   }
1278 }
1279 
1280 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1281   if (group->oop_fields() != nullptr) {
1282     for (int i = 0; i < group->oop_fields()->length(); i++) {
1283       LayoutRawBlock* b = group->oop_fields()->at(i);
1284       nonstatic_oop_maps->add(b->offset(), 1);
1285     }
1286   }
1287   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1288   register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1289 }
1290 
1291 void FieldLayoutBuilder::epilogue() {
1292   // Computing oopmaps
1293   OopMapBlocksBuilder* nonstatic_oop_maps =
1294       new OopMapBlocksBuilder(_nonstatic_oopmap_count);
1295   int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
1296   if (super_oop_map_count > 0) {
1297     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
1298     _super_klass->nonstatic_oop_map_count());
1299   }
1300   register_embedded_oops(nonstatic_oop_maps, _root_group);
1301   if (!_contended_groups.is_empty()) {
1302     for (int i = 0; i < _contended_groups.length(); i++) {
1303       FieldGroup* cg = _contended_groups.at(i);
1304       if (cg->oop_count() > 0) {
1305         assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
1306         register_embedded_oops(nonstatic_oop_maps, cg);
1307       }
1308     }
1309   }

1310   nonstatic_oop_maps->compact();
1311 
1312   int instance_end = align_up(_layout->last_block()->offset(), wordSize);
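       // E.g. (hypothetical, 64-bit word size): a last block ending at offset 28
       // gives instance_end == 32 after word alignment, i.e. an instance size of
       // 4 words before object-size alignment is applied.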
1313   int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
1314   int static_fields_size = (static_fields_end -
1315       InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
1316   int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
1317 
1318   // Pass back information needed for InstanceKlass creation
1319 
1320   _info->oop_map_blocks = nonstatic_oop_maps;
1321   _info->_instance_size = align_object_size(instance_end / wordSize);
1322   _info->_static_field_size = static_fields_size;
1323   _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
1324   _info->_has_nonstatic_fields = _has_nonstatic_fields;
1325   _info->_has_inline_fields = _has_inline_type_fields;
1326   _info->_is_naturally_atomic = _is_naturally_atomic;
1327   if (_is_inline_type) {
1328     _info->_must_be_atomic = _must_be_atomic;
1329     _info->_payload_alignment = _payload_alignment;
1330     _info->_payload_offset = _payload_offset;
1331     _info->_payload_size_in_bytes = _payload_size_in_bytes;
1332     _info->_non_atomic_size_in_bytes = _non_atomic_layout_size_in_bytes;
1333     _info->_non_atomic_alignment = _non_atomic_layout_alignment;
1334     _info->_atomic_layout_size_in_bytes = _atomic_layout_size_in_bytes;
1335     _info->_nullable_layout_size_in_bytes = _nullable_layout_size_in_bytes;
1336     _info->_null_marker_offset = _null_marker_offset;
1337     _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
1338     _info->_is_empty_inline_klass = _is_empty_inline_class;
1339   }
1340 
1341   // This may be too restrictive, since if all the fields fit in 64
1342   // bits we could make the decision to align instances of this class
1343   // to 64-bit boundaries, and load and store them as single words.
1344   // And on machines that support larger atomics we could similarly
1345   // allow larger values to be atomic, if properly aligned.
1346 
1347 #ifdef ASSERT
1348   // Tests verifying the integrity of field layouts use the output of -XX:+PrintFieldLayout,
1349   // which prints the details of the LayoutRawBlocks used to compute the layout.
1350   // The code below checks that offsets in the _field_info meta-data match offsets
1351   // in the LayoutRawBlocks.
1352   LayoutRawBlock* b = _layout->blocks();
1353   while(b != _layout->last_block()) {
1354     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1355       if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
1356         tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
1357       }
1358       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1359     }
1360     b = b->next_block();
1361   }
1362   b = _static_layout->blocks();
1363   while(b != _static_layout->last_block()) {
1364     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1365       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1366     }
1367     b = b->next_block();
1368   }
1369 #endif // ASSERT
1370 
1371   static bool first_layout_print = true;
1372 
1373 
1374   if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
1375     ResourceMark rm;
1376     stringStream st;
1377     if (first_layout_print) {
1378       st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
1379       st.print_cr("Heap oop size = %d", heapOopSize);
1380       first_layout_print = false;
1381     }
1382     if (_super_klass != nullptr) {
1383       st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
1384                     _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
1385     } else {
1386       st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
1387     }
1388     st.print_cr("Instance fields:");
1389     _layout->print(&st, false, _super_klass, _inline_layout_info_array);
1390     st.print_cr("Static fields:");
1391     _static_layout->print(&st, true, nullptr, _inline_layout_info_array);
1392     st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
1393     if (_is_inline_type) {
1394       st.print_cr("First field offset = %d", _payload_offset);
1395       st.print_cr("Payload layout: %d/%d", _payload_size_in_bytes, _payload_alignment);
1396       if (has_non_atomic_flat_layout()) {
1397         st.print_cr("Non atomic flat layout: %d/%d", _non_atomic_layout_size_in_bytes, _non_atomic_layout_alignment);
1398       } else {
1399         st.print_cr("Non atomic flat layout: -/-");
1400       }
1401       if (has_atomic_layout()) {
1402         st.print_cr("Atomic flat layout: %d/%d", _atomic_layout_size_in_bytes, _atomic_layout_size_in_bytes);
1403       } else {
1404         st.print_cr("Atomic flat layout: -/-");
1405       }
1406       if (has_nullable_atomic_layout()) {
1407         st.print_cr("Nullable flat layout: %d/%d", _nullable_layout_size_in_bytes, _nullable_layout_size_in_bytes);
1408       } else {
1409         st.print_cr("Nullable flat layout: -/-");
1410       }
1411       if (_null_marker_offset != -1) {
1412         st.print_cr("Null marker offset = %d", _null_marker_offset);
1413       }
1414     }
1415     st.print_cr("---");
1416     // Print output all together.
1417     tty->print_raw(st.as_string());
1418   }
1419 }
1420 
1421 void FieldLayoutBuilder::build_layout() {
1422   if (_is_inline_type || _is_abstract_value) {
1423     compute_inline_class_layout();
1424   } else {
1425     compute_regular_layout();
1426   }
1427 }