src/hotspot/share/classfile/fieldLayoutBuilder.cpp

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"


  27 #include "jvm.h"
  28 #include "memory/resourceArea.hpp"
  29 #include "oops/array.hpp"
  30 #include "oops/fieldStreams.inline.hpp"

  31 #include "oops/instanceKlass.inline.hpp"
  32 #include "oops/instanceMirrorKlass.hpp"
  33 #include "oops/klass.inline.hpp"
  34 #include "runtime/fieldDescriptor.inline.hpp"

  35 
  36 
  37 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  38   _next_block(nullptr),
  39   _prev_block(nullptr),
  40   _kind(kind),


  41   _offset(-1),
  42   _alignment(1),
  43   _size(size),
  44   _field_index(-1),
  45   _is_reference(false) {
  46   assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
  47          "Otherwise, should use the constructor with a field index argument");
  48   assert(size > 0, "Sanity check");
  49 }
  50 
  51 
  52 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  53  _next_block(nullptr),
  54  _prev_block(nullptr),
  55  _kind(kind),


  56  _offset(-1),
  57  _alignment(alignment),
  58  _size(size),
  59  _field_index(index),
  60  _is_reference(is_reference) {
  61   assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
  62          "Other kinds do not have a field index");
  63   assert(size > 0, "Sanity check");
  64   assert(alignment > 0, "Sanity check");
  65 }
  66 
  67 bool LayoutRawBlock::fit(int size, int alignment) {
  68   int adjustment = 0;
  69   if ((_offset % alignment) != 0) {
  70     adjustment = alignment - (_offset % alignment);
  71   }
  72   return _size >= size + adjustment;
  73 }
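
// [Editor's note: illustrative example, not part of this change]
// fit() answers whether this block can hold 'size' bytes at the requested
// alignment, counting the padding needed to round _offset up to the next
// multiple of 'alignment'. For instance, with _offset == 10 and _size == 8:
//   fit(4, 4): adjustment = 4 - (10 % 4) = 2, 4 + 2 = 6  <= 8 -> fits
//   fit(8, 8): adjustment = 8 - (10 % 8) = 6, 8 + 6 = 14 >  8 -> does not fit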
  74 
  75 FieldGroup::FieldGroup(int contended_group) :
  76   _next(nullptr),
  77   _primitive_fields(nullptr),

  78   _oop_fields(nullptr),
  79   _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  80   _oop_count(0) {}
  81 
  82 void FieldGroup::add_primitive_field(int idx, BasicType type) {
  83   int size = type2aelembytes(type);
  84   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
  85   if (_primitive_fields == nullptr) {
  86     _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);


  87   }
  88   _primitive_fields->append(block);
  89 }
  90 
  91 void FieldGroup::add_oop_field(int idx) {
  92   int size = type2aelembytes(T_OBJECT);
  93   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
  94   if (_oop_fields == nullptr) {
  95     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
  96   }
  97   _oop_fields->append(block);
  98   _oop_count++;
  99 }
 100 
 101 void FieldGroup::sort_by_size() {
 102   if (_primitive_fields != nullptr) {
 103     _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 104   }

 105 }
 106 
 107 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
 108   _field_info(field_info),

 109   _cp(cp),
 110   _blocks(nullptr),
 111   _start(_blocks),
 112   _last(_blocks) {}
 113 
 114 void FieldLayout::initialize_static_layout() {
 115   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 116   _blocks->set_offset(0);
 117   _last = _blocks;
 118   _start = _blocks;
 119   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
 120   // during bootstrapping, the size of the java.lang.Class is still not known when layout
 121   // of static field is computed. Field offsets are fixed later when the size is known
 122   // (see java_lang_Class::fixup_mirror())
 123   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 124     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 125     _blocks->set_offset(0);
 126   }
 127 }
 128 
 129 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
 130   if (super_klass == nullptr) {
 131     super_ends_with_oop = false;
 132     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 133     _blocks->set_offset(0);
 134     _last = _blocks;
 135     _start = _blocks;
 136     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 137   } else {
 138     bool super_has_instance_fields = false;
 139     reconstruct_layout(super_klass, super_has_instance_fields, super_ends_with_oop);
 140     fill_holes(super_klass);
 141     if (!super_klass->has_contended_annotations() || !super_has_instance_fields) {
 142       _start = _blocks;  // start allocating fields from the first empty block
 143     } else {
 144       _start = _last;    // append fields at the end of the reconstructed layout
 145     }
 146   }
 147 }
 148 
 149 LayoutRawBlock* FieldLayout::first_field_block() {
 150   LayoutRawBlock* block = _start;
 151   while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
 152       && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
 153     block = block->next_block();
 154   }
 155   return block;
 156 }
 157 
 158 
 159 // Insert a set of fields into a layout using a best-fit strategy.
 160 // For each field, search for the smallest empty slot able to fit the field
 161 // (satisfying both size and alignment requirements); if none is found,
 162 // the field is added at the end of the layout.
 163 // Fields cannot be inserted before the block specified in the "start" argument.
 164 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 165   if (list == nullptr) return;
 166   if (start == nullptr) start = this->_start;
 167   bool last_search_success = false;
 168   int last_size = 0;
 169   int last_alignment = 0;
 170   for (int i = 0; i < list->length(); i ++) {
 171     LayoutRawBlock* b = list->at(i);
 172     LayoutRawBlock* cursor = nullptr;
 173     LayoutRawBlock* candidate = nullptr;
 174 
 175     // if start is the last block, just append the field
 176     if (start == last_block()) {
 177       candidate = last_block();
 178     }
 179     // Before iterating over the layout to find an empty slot fitting the field's requirements,
 180     // check if the previous field had the same requirements and if the search for a fitting slot
 181     // was successful. If the requirements were the same but the search failed, a new search will
 182     // fail the same way, so just append the field at the end of the layout.
 183     else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
 184       candidate = last_block();
 185     } else {
 186       // Iterate over the layout to find an empty slot fitting the field's requirements
 187       last_size = b->size();
 188       last_alignment = b->alignment();
 189       cursor = last_block()->prev_block();
 190       assert(cursor != nullptr, "Sanity check");
 191       last_search_success = true;

 192       while (cursor != start) {
 193         if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
 194           if (candidate == nullptr || cursor->size() < candidate->size()) {
 195             candidate = cursor;
 196           }
 197         }
 198         cursor = cursor->prev_block();
 199       }
 200       if (candidate == nullptr) {
 201         candidate = last_block();
 202         last_search_success = false;
 203       }
 204       assert(candidate != nullptr, "Candidate must not be null");
 205       assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 206       assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
 207     }
 208 
 209     insert_field_block(candidate, b);
 210   }
 211 }
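
// [Editor's note: illustrative example, not part of this change]
// Best-fit choice in add(): if the layout currently contains an 8-byte EMPTY
// block at offset 16 and a 4-byte EMPTY block at offset 40 (plus the trailing
// EMPTY block), a 4-byte field with 4-byte alignment goes into the block at
// offset 40, because it is the smallest empty slot that satisfies both the
// size and the alignment requirement.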
 212 
 213 // Used for classes with hard-coded field offsets: insert a field at the specified offset
 214 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
 215   assert(block != nullptr, "Sanity check");
 216   block->set_offset(offset);
 217   if (start == nullptr) {
 218     start = this->_start;
 219   }
 220   LayoutRawBlock* slot = start;
 221   while (slot != nullptr) {
 222     if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
 223         slot == _last){
 224       assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
 225       assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
 226       if (slot->offset() < block->offset()) {
 227         int adjustment = block->offset() - slot->offset();
 228         LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 229         insert(slot, adj);
 230       }
 231       insert(slot, block);
 232       if (slot->size() == 0) {
 233         remove(slot);
 234       }
 235       _field_info->adr_at(block->field_index())->set_offset(block->offset());


 236       return;
 237     }
 238     slot = slot->next_block();
 239   }
 240   fatal("Should have found a matching slot above, corrupted layout or invalid offset");
 241 }
 242 
 243 // The allocation logic uses a best-fit strategy: the set of fields is allocated
 244 // in the first empty slot big enough to contain the whole set (including padding
 245 // to fit alignment constraints).
 246 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 247   if (list == nullptr) return;
 248   if (start == nullptr) {
 249     start = _start;
 250   }
 251   // This code assumes that if the first block is well aligned, the following
 252   // blocks would naturally be well aligned (no need for adjustment)
 253   int size = 0;
 254   for (int i = 0; i < list->length(); i++) {
 255     size += list->at(i)->size();
 256   }
 257 
 258   LayoutRawBlock* candidate = nullptr;
 259   if (start == last_block()) {
 260     candidate = last_block();
 261   } else {
 262     LayoutRawBlock* first = list->at(0);
 263     candidate = last_block()->prev_block();
 264     while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
 265       if (candidate == start) {
 266         candidate = last_block();
 267         break;
 268       }
 269       candidate = candidate->prev_block();
 270     }
 271     assert(candidate != nullptr, "Candidate must not be null");
 272     assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 273     assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
 274   }
 275 
 276   for (int i = 0; i < list->length(); i++) {
 277     LayoutRawBlock* b = list->at(i);
 278     insert_field_block(candidate, b);
 279     assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
 280   }
 281 }
 282 
 283 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 284   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 285   if (slot->offset() % block->alignment() != 0) {
 286     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 287     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 288     insert(slot, adj);
 289   }

 290   insert(slot, block);
 291   if (slot->size() == 0) {
 292     remove(slot);
 293   }
 294   _field_info->adr_at(block->field_index())->set_offset(block->offset());
 295   return block;
 296 }
 297 
 298 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
 299   has_instance_fields = ends_with_oop = false;
 300   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 301   BasicType last_type;
 302   int last_offset = -1;
 303   while (ik != nullptr) {
 304     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 305       BasicType type = Signature::basic_type(fs.signature());
 306       // distinction between static and non-static fields is missing
 307       if (fs.access_flags().is_static()) continue;
 308       has_instance_fields = true;
 309       if (fs.offset() > last_offset) {
 310         last_offset = fs.offset();
 311         last_type = type;
 312       }
 313       int size = type2aelembytes(type);
 314       // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 315       LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
 316       block->set_offset(fs.offset());
 317       all_fields->append(block);
 318     }
 319     ik = ik->super() == nullptr ? nullptr : ik->super();
 320   }
 321   assert(last_offset == -1 || last_offset > 0, "Sanity");
 322   if (last_offset > 0 &&
 323       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 324     ends_with_oop = true;
 325   }
 326 
 327   all_fields->sort(LayoutRawBlock::compare_offset);
 328   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 329   _blocks->set_offset(0);
 330   _last = _blocks;
 331 
 332   for(int i = 0; i < all_fields->length(); i++) {
 333     LayoutRawBlock* b = all_fields->at(i);
 334     _last->set_next_block(b);
 335     b->set_prev_block(_last);
 336     _last = b;
 337   }
 338   _start = _blocks;
 339 }
 340 
 341 // Called during the reconstruction of a layout, after fields from super
 342 // classes have been inserted. It fills unused slots between inserted fields
 343 // with EMPTY blocks, so that the regular field insertion methods work.
 344 // This method handles classes with @Contended annotations differently
 345 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
 346 // fields from interfering with contended fields/classes.
 347 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
 348   assert(_blocks != nullptr, "Sanity check");
 349   assert(_blocks->offset() == 0, "first block must be at offset zero");
 350   LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
 351   LayoutRawBlock* b = _blocks;
 352   while (b->next_block() != nullptr) {
 353     if (b->next_block()->offset() > (b->offset() + b->size())) {
 354       int size = b->next_block()->offset() - (b->offset() + b->size());

 355       LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
 356       empty->set_offset(b->offset() + b->size());
 357       empty->set_next_block(b->next_block());
 358       b->next_block()->set_prev_block(empty);
 359       b->set_next_block(empty);
 360       empty->set_prev_block(b);
 361     }
 362     b = b->next_block();
 363   }
 364   assert(b->next_block() == nullptr, "Invariant at this point");
 365   assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
 366 
 367   // If the super class has @Contended annotation, a padding block is
 368   // inserted at the end to ensure that fields from the subclasses won't share
 369   // the cache line of the last field of the contended class
 370   if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
 371     LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 372     p->set_offset(b->offset() + b->size());
 373     b->set_next_block(p);
 374     p->set_prev_block(b);
 375     b = p;
 376   }
 377 
 378   LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 379   last->set_offset(b->offset() + b->size());
 380   assert(last->offset() > 0, "Sanity check");
 381   b->set_next_block(last);
 382   last->set_prev_block(b);
 383   _last = last;
 384 }
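
// [Editor's note: illustrative example, not part of this change; the offsets
// below assume a 12-byte object header]
// A super class reconstructed as  RESERVED @0 (12 bytes) | int @16 | long @24
// gets an EMPTY block of 4 bytes at offset 12, an EMPTY block of 4 bytes at
// offset 20, and a trailing EMPTY block starting at offset 32. If the super
// class has @Contended annotations, the two gaps become PADDING blocks and
// ContendedPaddingWidth bytes of PADDING are appended before the trailing
// EMPTY block.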
 385 
 386 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
 387   assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 388   assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
 389   block->set_offset(slot->offset());
 390   slot->set_offset(slot->offset() + block->size());
 391   assert((slot->size() - block->size()) < slot->size(), "underflow checking");
 392   assert(slot->size() - block->size() >= 0, "no negative size allowed");
 393   slot->set_size(slot->size() - block->size());
 394   block->set_prev_block(slot->prev_block());
 395   block->set_next_block(slot);
 396   slot->set_prev_block(block);
 397   if (block->prev_block() != nullptr) {
 398     block->prev_block()->set_next_block(block);
 399   }
 400   if (_blocks == slot) {
 401     _blocks = block;
 402   }
 403   return block;
 404 }
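
// [Editor's note: illustrative example, not part of this change]
// insert() carves 'block' out of the front of an EMPTY 'slot': inserting a
// 4-byte block into an 8-byte EMPTY block at offset 16 yields
//   ... | block @16 (4 bytes) | EMPTY @20 (4 bytes) | ...
// Callers are expected to align the slot beforehand (see insert_field_block).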
 405 
 406 void FieldLayout::remove(LayoutRawBlock* block) {
 407   assert(block != nullptr, "Sanity check");
 408   assert(block != _last, "Sanity check");
 409   if (_blocks == block) {
 410     _blocks = block->next_block();
 411     if (_blocks != nullptr) {
 412       _blocks->set_prev_block(nullptr);
 413     }
 414   } else {
 415     assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
 416     block->prev_block()->set_next_block(block->next_block());
 417     block->next_block()->set_prev_block(block->prev_block());
 418   }
 419   if (block == _start) {
 420     _start = block->prev_block();
 421   }
 422 }
 423 
 424 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
 425   ResourceMark rm;
 426   LayoutRawBlock* b = _blocks;
 427   while(b != _last) {
 428     switch(b->kind()) {
 429       case LayoutRawBlock::REGULAR: {
 430         FieldInfo* fi = _field_info->adr_at(b->field_index());
 431         output->print_cr(" @%d \"%s\" %s %d/%d %s",
 432                          b->offset(),
 433                          fi->name(_cp)->as_C_string(),
 434                          fi->signature(_cp)->as_C_string(),
 435                          b->size(),
 436                          b->alignment(),
 437                          "REGULAR");

 438         break;
 439       }
 440       case LayoutRawBlock::FLATTENED: {
 441         FieldInfo* fi = _field_info->adr_at(b->field_index());
 442         output->print_cr(" @%d \"%s\" %s %d/%d %s",


 443                          b->offset(),
 444                          fi->name(_cp)->as_C_string(),
 445                          fi->signature(_cp)->as_C_string(),
 446                          b->size(),
 447                          b->alignment(),
 448                          "FLATTENED");
 449         break;
 450       }
 451       case LayoutRawBlock::RESERVED: {
 452         output->print_cr(" @%d %d/- %s",
 453                          b->offset(),
 454                          b->size(),
 455                          "RESERVED");
 456         break;
 457       }
 458       case LayoutRawBlock::INHERITED: {
 459         assert(!is_static, "Static fields are not inherited in layouts");
 460         assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
 461         bool found = false;
 462         const InstanceKlass* ik = super;
 463         while (!found && ik != nullptr) {
 464           for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 465             if (fs.offset() == b->offset()) {
 466               output->print_cr(" @%d \"%s\" %s %d/%d %s",
 467                   b->offset(),
 468                   fs.name()->as_C_string(),
 469                   fs.signature()->as_C_string(),
 470                   b->size(),
 471                   b->size(), // so far, alignment constraint == size, will change with Valhalla
 472                   "INHERITED");

 473               found = true;
 474               break;
 475             }
 476           }
 477           ik = ik->super();
 478         }
 479         break;
 480       }
 481       case LayoutRawBlock::EMPTY:
 482         output->print_cr(" @%d %d/1 %s",
 483                          b->offset(),
 484                          b->size(),
 485                         "EMPTY");
 486         break;
 487       case LayoutRawBlock::PADDING:
 488         output->print_cr(" @%d %d/1 %s",
 489                          b->offset(),
 490                          b->size(),
 491                         "PADDING");
 492         break;
 493     }
 494     b = b->next_block();
 495   }
 496 }
 497 
 498 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
 499       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :

 500   _classname(classname),

 501   _super_klass(super_klass),
 502   _constant_pool(constant_pool),
 503   _field_info(field_info),
 504   _info(info),

 505   _root_group(nullptr),
 506   _contended_groups(GrowableArray<FieldGroup*>(8)),
 507   _static_fields(nullptr),
 508   _layout(nullptr),
 509   _static_layout(nullptr),
 510   _nonstatic_oopmap_count(0),
 511   _alignment(-1),
 512   _has_nonstatic_fields(false),
 513   _is_contended(is_contended) {}
 514 
 515 
 516 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 517   assert(g > 0, "must only be called for named contended groups");
 518   FieldGroup* fg = nullptr;
 519   for (int i = 0; i < _contended_groups.length(); i++) {
 520     fg = _contended_groups.at(i);
 521     if (fg->contended_group() == g) return fg;
 522   }
 523   fg = new FieldGroup(g);
 524   _contended_groups.append(fg);
 525   return fg;
 526 }
 527 
 528 void FieldLayoutBuilder::prologue() {
 529   _layout = new FieldLayout(_field_info, _constant_pool);
 530   const InstanceKlass* super_klass = _super_klass;
 531   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);

 532   if (super_klass != nullptr) {
 533     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 534   }
 535   _static_layout = new FieldLayout(_field_info, _constant_pool);
 536   _static_layout->initialize_static_layout();
 537   _static_fields = new FieldGroup();
 538   _root_group = new FieldGroup();
 539 }
 540 
 541 // Field sorting for regular classes:
 542 //   - fields are sorted into static and non-static fields
 543 //   - non-static fields are also sorted according to their contention group
 544 //     (support for the @Contended annotation)
 545 //   - the @Contended annotation is ignored for static fields

 546 void FieldLayoutBuilder::regular_field_sorting() {
 547   int idx = 0;
 548   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
 549     FieldInfo ctrl = _field_info->at(0);
 550     FieldGroup* group = nullptr;
 551     FieldInfo fieldinfo = *it;
 552     if (fieldinfo.access_flags().is_static()) {
 553       group = _static_fields;
 554     } else {
 555       _has_nonstatic_fields = true;
 556       if (fieldinfo.field_flags().is_contended()) {
 557         int g = fieldinfo.contended_group();
 558         if (g == 0) {
 559           group = new FieldGroup(true);
 560           _contended_groups.append(group);
 561         } else {
 562           group = get_or_create_contended_group(g);
 563         }
 564       } else {
 565         group = _root_group;
 566       }
 567     }
 568     assert(group != nullptr, "invariant");
 569     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 570     switch(type) {
 571       case T_BYTE:
 572       case T_CHAR:
 573       case T_DOUBLE:
 574       case T_FLOAT:
 575       case T_INT:
 576       case T_LONG:
 577       case T_SHORT:
 578       case T_BOOLEAN:
 579         group->add_primitive_field(idx, type);
 580         break;
 581       case T_OBJECT:
 582       case T_ARRAY:
 583         if (group != _static_fields) _nonstatic_oopmap_count++;
 584         group->add_oop_field(idx);
 585         break;
 586       default:
 587         fatal("Something wrong?");
 588     }
 589   }
 590   _root_group->sort_by_size();
 591   _static_fields->sort_by_size();
 592   if (!_contended_groups.is_empty()) {
 593     for (int i = 0; i < _contended_groups.length(); i++) {
 594       _contended_groups.at(i)->sort_by_size();
 595     }
 596   }
 597 }
 598 
 599 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 600   if (ContendedPaddingWidth > 0) {
 601     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 602     _layout->insert(slot, padding);
 603   }
 604 }
 605 
 606 // Computation of regular classes layout is an evolution of the previous default layout
 607 // (FieldAllocationStyle 1):
 608 //   - primitive fields are allocated first (from the biggest to the smallest)

 609 //   - oop fields are allocated, either in existing gaps or at the end of
 610 //     the layout. We allocate oops in a single block to have a single oop map entry.
 611 //   - if the super class ended with an oop, we lead with oops. That will cause the
 612 //     trailing oop map entry of the super class and the oop map entry of this class
 613 //     to be folded into a single entry later. Correspondingly, if the super class
 614 //     ends with a primitive field, we gain nothing by leading with oops; therefore
 615 //     we let oop fields trail, thus giving future derived classes the chance to apply
 616 //     the same trick.
 617 void FieldLayoutBuilder::compute_regular_layout() {
 618   bool need_tail_padding = false;
 619   prologue();
 620   regular_field_sorting();
 621 
 622   if (_is_contended) {
 623     _layout->set_start(_layout->last_block());
 624     // insertion is currently easy because the current strategy doesn't try to fill holes
 625     // in super classes' layouts => the _start block is consequently the _last_block
 626     insert_contended_padding(_layout->start());
 627     need_tail_padding = true;
 628   }
 629 
 630   if (_super_ends_with_oop) {
 631     _layout->add(_root_group->oop_fields());
 632     _layout->add(_root_group->primitive_fields());

 633   } else {
 634     _layout->add(_root_group->primitive_fields());

 635     _layout->add(_root_group->oop_fields());
 636   }
 637 
 638   if (!_contended_groups.is_empty()) {
 639     for (int i = 0; i < _contended_groups.length(); i++) {
 640       FieldGroup* cg = _contended_groups.at(i);
 641       LayoutRawBlock* start = _layout->last_block();
 642       insert_contended_padding(start);
 643       _layout->add(cg->primitive_fields(), start);

 644       _layout->add(cg->oop_fields(), start);
 645       need_tail_padding = true;
 646     }
 647   }
 648 
 649   if (need_tail_padding) {
 650     insert_contended_padding(_layout->last_block());
 651   }
 652 
 653   _static_layout->add_contiguously(this->_static_fields->oop_fields());
 654   _static_layout->add(this->_static_fields->primitive_fields());


 655 
 656   epilogue();
 657 }
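
// [Editor's note: illustrative example, not part of this change; the offsets
// below assume a 12-byte object header and compressed oops]
// For a plain class  class C { long l; int i; byte b; Object o; }  with a
// field-less super class, the strategy above places the long at offset 16,
// lets best fit reuse the 4-byte gap at offset 12 for the int, appends the
// byte at offset 24, and adds the oop field last at offset 28, so the class
// contributes a single oop map entry.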
 658 
 659 void FieldLayoutBuilder::epilogue() {
 660   // Computing oopmaps
 661   int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
 662   int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
 663 
 664   OopMapBlocksBuilder* nonstatic_oop_maps =
 665       new OopMapBlocksBuilder(max_oop_map_count);
 666   if (super_oop_map_count > 0) {
 667     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
 668     _super_klass->nonstatic_oop_map_count());
 669   }
 670 
 671   if (_root_group->oop_fields() != nullptr) {
 672     for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
 673       LayoutRawBlock* b = _root_group->oop_fields()->at(i);

 674       nonstatic_oop_maps->add(b->offset(), 1);
 675     }
 676   }
 677 
 678   if (!_contended_groups.is_empty()) {
 679     for (int i = 0; i < _contended_groups.length(); i++) {
 680       FieldGroup* cg = _contended_groups.at(i);
 681       if (cg->oop_count() > 0) {
 682         assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
 683         nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
 684       }
 685     }
 686   }
 687 
 688   nonstatic_oop_maps->compact();
 689 
 690   int instance_end = align_up(_layout->last_block()->offset(), wordSize);
 691   int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
 692   int static_fields_size = (static_fields_end -
 693       InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
 694   int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
 695 
 696   // Pass back information needed for InstanceKlass creation
 697 
 698   _info->oop_map_blocks = nonstatic_oop_maps;
 699   _info->_instance_size = align_object_size(instance_end / wordSize);
 700   _info->_static_field_size = static_fields_size;
 701   _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
 702   _info->_has_nonstatic_fields = _has_nonstatic_fields;
 703 
 704   if (PrintFieldLayout) {

 705     ResourceMark rm;
 706     tty->print_cr("Layout of class %s", _classname->as_C_string());
 707     tty->print_cr("Instance fields:");
 708     _layout->print(tty, false, _super_klass);
 709     tty->print_cr("Static fields:");
 710     _static_layout->print(tty, true, nullptr);
 711     tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
 712     tty->print_cr("---");
 713   }
 714 }
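
// [Editor's note: worked example, not part of this change; the numbers assume
// a 64-bit VM with compressed oops (wordSize == 8, heapOopSize == 4), the
// default 8-byte object alignment, and an instance base offset of 12 bytes]
// If the trailing block of the instance layout starts at offset 33:
//   instance_end          = align_up(33, 8) = 40
//   _instance_size        = align_object_size(40 / 8) = 5 words
//   nonstatic_field_end   = align_up(33, 4) = 36
//   _nonstatic_field_size = (36 - 12) / 4 = 6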
 715 
 716 void FieldLayoutBuilder::build_layout() {
 717   compute_regular_layout();
 718 }

   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "classfile/classFileParser.hpp"
  26 #include "classfile/fieldLayoutBuilder.hpp"
  27 #include "classfile/systemDictionary.hpp"
  28 #include "classfile/vmSymbols.hpp"
  29 #include "jvm.h"
  30 #include "memory/resourceArea.hpp"
  31 #include "oops/array.hpp"
  32 #include "oops/fieldStreams.inline.hpp"
  33 #include "oops/inlineKlass.inline.hpp"
  34 #include "oops/instanceKlass.inline.hpp"
  35 #include "oops/instanceMirrorKlass.hpp"
  36 #include "oops/klass.inline.hpp"
  37 #include "runtime/fieldDescriptor.inline.hpp"
  38 #include "utilities/powerOfTwo.hpp"
  39 
  40 static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
  41                                          bool use_atomic_flat) {
  42 
  43   if (!UseFieldFlattening) {
  44     return LayoutKind::REFERENCE;
  45   }
  46 
  47   if (field_info.field_flags().is_injected()) {
  48     // don't flatten injected fields
  49     return LayoutKind::REFERENCE;
  50   }
  51 
  52   if (field_info.access_flags().is_volatile()) {
  53     // volatile is used as a keyword to prevent flattening
  54     return LayoutKind::REFERENCE;
  55   }
  56 
  57   if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
  58     // the field's type is not a known value class, so a reference is used
  59     return LayoutKind::REFERENCE;
  60   }
  61 
  62   InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
  63   InlineKlass* vk = inline_field_info->klass();
  64 
  65   if (field_info.field_flags().is_null_free_inline_type()) {
  66     assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
  67     if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
  68       if (vk->is_naturally_atomic() && vk->has_non_atomic_layout()) return LayoutKind::NULL_FREE_NON_ATOMIC_FLAT;
  69       return (vk->has_atomic_layout() && use_atomic_flat) ? LayoutKind::NULL_FREE_ATOMIC_FLAT : LayoutKind::REFERENCE;
  70     } else {
  71       return vk->has_non_atomic_layout() ? LayoutKind::NULL_FREE_NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
  72     }
  73   } else {
  74     if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
  75       return use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
  76     } else {
  77       return LayoutKind::REFERENCE;
  78     }
  79   }
  80 }
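
// [Editor's note: summary of the selection above, not part of this change]
// REFERENCE is chosen when field flattening is disabled, for injected or
// volatile fields, and when the field's type is not a loaded value class.
// For a null-free (strict) field: if no atomicity is required, the non-atomic
// flat layout is used when it exists; if atomic access is required, the
// non-atomic flat layout is still used for naturally atomic types, otherwise
// the atomic flat layout is used when it exists and use_atomic_flat is true.
// A nullable field flattens only to the nullable atomic layout, and only when
// UseNullableValueFlattening is enabled, such a layout exists, and
// use_atomic_flat is true. Every remaining case stays a REFERENCE.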
  81 
  82 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
  83   switch(kind) {
  84     case LayoutKind::NULL_FREE_NON_ATOMIC_FLAT:
  85       *size = vk->non_atomic_size_in_bytes();
  86       *alignment = vk->non_atomic_alignment();
  87       break;
  88     case LayoutKind::NULL_FREE_ATOMIC_FLAT:
  89       *size = vk->atomic_size_in_bytes();
  90       *alignment = *size;
  91       break;
  92     case LayoutKind::NULLABLE_ATOMIC_FLAT:
  93       *size = vk->nullable_atomic_size_in_bytes();
  94       *alignment = *size;
  95     break;
  96     default:
  97       ShouldNotReachHere();
  98   }
  99 }
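
// [Editor's note: illustrative example, not part of this change]
// For the atomic flat layouts the alignment equals the full layout size, so
// the payload can be accessed with a single atomic operation: a value class
// whose nullable atomic layout occupies 8 bytes is flattened with size == 8
// and alignment == 8 when NULLABLE_ATOMIC_FLAT is selected.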
 100 
 101 LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
 102   _next_block(nullptr),
 103   _prev_block(nullptr),
 104   _inline_klass(nullptr),
 105   _block_kind(kind),
 106   _layout_kind(LayoutKind::UNKNOWN),
 107   _offset(-1),
 108   _alignment(1),
 109   _size(size),
 110   _field_index(-1) {
 111   assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,

 112          "Otherwise, should use the constructor with a field index argument");
 113   assert(size > 0, "Sanity check");
 114 }
 115 
 116 
 117 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
 118  _next_block(nullptr),
 119  _prev_block(nullptr),
 120  _inline_klass(nullptr),
 121  _block_kind(kind),
 122  _layout_kind(LayoutKind::UNKNOWN),
 123  _offset(-1),
 124  _alignment(alignment),
 125  _size(size),
 126  _field_index(index) {
 127   assert(kind == REGULAR || kind == FLAT || kind == INHERITED,

 128          "Other kinds do not have a field index");
 129   assert(size > 0, "Sanity check");
 130   assert(alignment > 0, "Sanity check");
 131 }
 132 
 133 bool LayoutRawBlock::fit(int size, int alignment) {
 134   int adjustment = 0;
 135   if ((_offset % alignment) != 0) {
 136     adjustment = alignment - (_offset % alignment);
 137   }
 138   return _size >= size + adjustment;
 139 }
 140 
 141 FieldGroup::FieldGroup(int contended_group) :
 142   _next(nullptr),
 143   _small_primitive_fields(nullptr),
 144   _big_primitive_fields(nullptr),
 145   _oop_fields(nullptr),
 146   _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
 147   _oop_count(0) {}
 148 
 149 void FieldGroup::add_primitive_field(int idx, BasicType type) {
 150   int size = type2aelembytes(type);
 151   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
 152   if (size >= oopSize) {
 153     add_to_big_primitive_list(block);
 154   } else {
 155     add_to_small_primitive_list(block);
 156   }

 157 }
 158 
 159 void FieldGroup::add_oop_field(int idx) {
 160   int size = type2aelembytes(T_OBJECT);
 161   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
 162   if (_oop_fields == nullptr) {
 163     _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 164   }
 165   _oop_fields->append(block);
 166   _oop_count++;
 167 }
 168 
 169 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
 170   LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
 171   block->set_inline_klass(vk);
 172   block->set_layout_kind(lk);
 173   if (block->size() >= oopSize) {
 174     add_to_big_primitive_list(block);
 175   } else {
 176     add_to_small_primitive_list(block);
 177   }
 178 }
 179 
 180 void FieldGroup::sort_by_size() {
 181   if (_small_primitive_fields != nullptr) {
 182     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 183   }
 184   if (_big_primitive_fields != nullptr) {
 185     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 186   }
 187 }
 188 
 189 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 190   if (_small_primitive_fields == nullptr) {
 191     _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 192   }
 193   _small_primitive_fields->append(block);
 194 }
 195 
 196 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 197   if (_big_primitive_fields == nullptr) {
 198     _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 199   }
 200   _big_primitive_fields->append(block);
 201 }
 202 
 203 FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
 204   _field_info(field_info),
 205   _inline_layout_info_array(inline_layout_info_array),
 206   _cp(cp),
 207   _blocks(nullptr),
 208   _start(_blocks),
 209   _last(_blocks),
 210   _super_first_field_offset(-1),
 211   _super_alignment(-1),
 212   _super_min_align_required(-1),
 213   _null_reset_value_offset(-1),
 214   _acmp_maps_offset(-1),
 215   _super_has_fields(false),
 216   _has_inherited_fields(false) {}
 217 
 218 void FieldLayout::initialize_static_layout() {
 219   _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 220   _blocks->set_offset(0);
 221   _last = _blocks;
 222   _start = _blocks;
 223   // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
 224   // during bootstrapping, the size of the java.lang.Class is still not known when layout
 225   // of static field is computed. Field offsets are fixed later when the size is known
 226   // (see java_lang_Class::fixup_mirror())
 227   if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
 228     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
 229     _blocks->set_offset(0);
 230   }
 231 }
 232 
 233 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass, bool& super_ends_with_oop) {
 234   if (super_klass == nullptr) {
 235     super_ends_with_oop = false;
 236     _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 237     _blocks->set_offset(0);
 238     _last = _blocks;
 239     _start = _blocks;
 240     insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
 241   } else {
 242     reconstruct_layout(super_klass, _super_has_fields, super_ends_with_oop);

 243     fill_holes(super_klass);
 244     if ((!super_klass->has_contended_annotations()) || !_super_has_fields) {
 245       _start = _blocks;  // start allocating fields from the first empty block
 246     } else {
 247       _start = _last;    // append fields at the end of the reconstructed layout
 248     }
 249   }
 250 }
 251 
 252 LayoutRawBlock* FieldLayout::first_field_block() {
 253   LayoutRawBlock* block = _blocks;
 254   while (block != nullptr
 255          && block->block_kind() != LayoutRawBlock::INHERITED
 256          && block->block_kind() != LayoutRawBlock::REGULAR
 257          && block->block_kind() != LayoutRawBlock::FLAT
 258          && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 259     block = block->next_block();
 260   }
 261   return block;
 262 }
 263 
 264 // Insert a set of fields into a layout.
 265 // For each field, search for an empty slot able to fit the field

 266 // (satisfying both size and alignment requirements); if none is found,
 267 // the field is added at the end of the layout.
 268 // Fields cannot be inserted before the block specified in the "start" argument.
 269 void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 270   if (list == nullptr) return;
 271   if (start == nullptr) start = this->_start;
 272   bool last_search_success = false;
 273   int last_size = 0;
 274   int last_alignment = 0;
 275   for (int i = 0; i < list->length(); i ++) {
 276     LayoutRawBlock* b = list->at(i);
 277     LayoutRawBlock* cursor = nullptr;
 278     LayoutRawBlock* candidate = nullptr;

 279     // if start is the last block, just append the field
 280     if (start == last_block()) {
 281       candidate = last_block();
 282     }
 283     // Before iterating over the layout to find an empty slot fitting the field's requirements,
 284     // check if the previous field had the same requirements and if the search for a fitting slot
 285     // was successful. If the requirements were the same but the search failed, a new search will
 286     // fail the same way, so just append the field at the end of the layout.
 287     else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
 288       candidate = last_block();
 289     } else {
 290       // Iterate over the layout to find an empty slot fitting the field's requirements
 291       last_size = b->size();
 292       last_alignment = b->alignment();
 293       cursor = last_block()->prev_block();
 294       assert(cursor != nullptr, "Sanity check");
 295       last_search_success = true;
 296 
 297       while (cursor != start) {
 298         if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
 299           if (candidate == nullptr || cursor->size() < candidate->size()) {
 300             candidate = cursor;
 301           }
 302         }
 303         cursor = cursor->prev_block();
 304       }
 305       if (candidate == nullptr) {
 306         candidate = last_block();
 307         last_search_success = false;
 308       }
 309       assert(candidate != nullptr, "Candidate must not be null");
 310       assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 311       assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
 312     }

 313     insert_field_block(candidate, b);
 314   }
 315 }
 316 
 317 // Used for classes with hard-coded field offsets: insert a field at the specified offset
 318 void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
 319   assert(block != nullptr, "Sanity check");
 320   block->set_offset(offset);
 321   if (start == nullptr) {
 322     start = this->_start;
 323   }
 324   LayoutRawBlock* slot = start;
 325   while (slot != nullptr) {
 326     if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
 327         slot == _last){
 328       assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
 329       assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
 330       if (slot->offset() < block->offset()) {
 331         int adjustment = block->offset() - slot->offset();
 332         LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 333         insert(slot, adj);
 334       }
 335       insert(slot, block);
 336       if (slot->size() == 0) {
 337         remove(slot);
 338       }
 339       if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
 340         _field_info->adr_at(block->field_index())->set_offset(block->offset());
 341       }
 342       return;
 343     }
 344     slot = slot->next_block();
 345   }
 346   fatal("Should have found a matching slot above, corrupted layout or invalid offset");
 347 }
 348 
 349 // The allocation logic uses a best-fit strategy: the set of fields is allocated
 350 // in the first empty slot big enough to contain the whole set (including padding
 351 // to fit alignment constraints).
 352 void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
 353   if (list == nullptr) return;
 354   if (start == nullptr) {
 355     start = _start;
 356   }
 357   // This code assumes that if the first block is well aligned, the following
 358   // blocks would naturally be well aligned (no need for adjustment)
 359   int size = 0;
 360   for (int i = 0; i < list->length(); i++) {
 361     size += list->at(i)->size();
 362   }
 363 
 364   LayoutRawBlock* candidate = nullptr;
 365   if (start == last_block()) {
 366     candidate = last_block();
 367   } else {
 368     LayoutRawBlock* first = list->at(0);
 369     candidate = last_block()->prev_block();
 370     while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
 371       if (candidate == start) {
 372         candidate = last_block();
 373         break;
 374       }
 375       candidate = candidate->prev_block();
 376     }
 377     assert(candidate != nullptr, "Candidate must not be null");
 378     assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
 379     assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
 380   }
 381 
 382   for (int i = 0; i < list->length(); i++) {
 383     LayoutRawBlock* b = list->at(i);
 384     insert_field_block(candidate, b);
 385     assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
 386   }
 387 }
 388 
 389 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
 390   assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 391   if (slot->offset() % block->alignment() != 0) {
 392     int adjustment = block->alignment() - (slot->offset() % block->alignment());
 393     LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
 394     insert(slot, adj);
 395   }
 396   assert(slot->size() >= block->size(), "Enough space must remain after adjustment");
 397   insert(slot, block);
 398   if (slot->size() == 0) {
 399     remove(slot);
 400   }
 401   // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
 402   if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
 403     _field_info->adr_at(block->field_index())->set_offset(block->offset());
 404     if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
 405       _null_reset_value_offset = block->offset();
 406     }
 407     if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::acmp_maps_name()) {
 408       _acmp_maps_offset = block->offset();
 409     }
 410   }
 411   if (LayoutKindHelper::is_nullable_flat(block->layout_kind())) {
 412     int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
 413     _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 414     _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
 415   }
 416 
 417   return block;
 418 }
 419 
 420 void FieldLayout::reconstruct_layout(const InstanceKlass* ik, bool& has_instance_fields, bool& ends_with_oop) {
 421   has_instance_fields = ends_with_oop = false;
 422   if (ik->is_abstract() && !ik->is_identity_class()) {
 423     _super_alignment = type2aelembytes(BasicType::T_LONG);
 424   }
 425   GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
 426   BasicType last_type;
 427   int last_offset = -1;
 428   while (ik != nullptr) {
 429     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 430       BasicType type = Signature::basic_type(fs.signature());
 431       // distinction between static and non-static fields is missing
 432       if (fs.access_flags().is_static()) continue;
 433       has_instance_fields = true;
 434       _has_inherited_fields = true;
 435       if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) {
 436         _super_first_field_offset = fs.offset();
 437       }
 438       LayoutRawBlock* block;
 439       if (fs.is_flat()) {
 440         InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
 441         InlineKlass* vk = layout_info.klass();
 442         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
 443                                    vk->layout_size_in_bytes(layout_info.kind()),
 444                                    vk->layout_alignment(layout_info.kind()));
 445         assert(_super_alignment == -1 || _super_alignment >=  vk->payload_alignment(), "Invalid value alignment");
 446         _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
 447       } else {
 448         int size = type2aelembytes(type);
 449         // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
 450         block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
 451         // For primitive types, the alignment is equal to the size
 452         assert(_super_alignment == -1 || _super_alignment >=  size, "Invalid value alignment");
 453         _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
 454       }
 455       if (fs.offset() > last_offset) {
 456         last_offset = fs.offset();
 457         last_type = type;
 458       }
 459       block->set_offset(fs.offset());
 460       all_fields->append(block);
 461     }
 462     ik = ik->super() == nullptr ? nullptr : ik->super();
 463   }
 464   assert(last_offset == -1 || last_offset > 0, "Sanity");
 465   if (last_offset > 0 &&
 466       (last_type == BasicType::T_ARRAY || last_type == BasicType::T_OBJECT)) {
 467     ends_with_oop = true;
 468   }
 469 
 470   all_fields->sort(LayoutRawBlock::compare_offset);
 471   _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
 472   _blocks->set_offset(0);
 473   _last = _blocks;

 474   for(int i = 0; i < all_fields->length(); i++) {
 475     LayoutRawBlock* b = all_fields->at(i);
 476     _last->set_next_block(b);
 477     b->set_prev_block(_last);
 478     _last = b;
 479   }
 480   _start = _blocks;
 481 }
 482 
 483 // Called during the reconstruction of a layout, after fields from super
 484 // classes have been inserted. It fills unused slots between inserted fields
 485 // with EMPTY blocks, so that the regular field insertion methods work.
 486 // This method handles classes with @Contended annotations differently
 487 // by inserting PADDING blocks instead of EMPTY blocks to prevent subclasses'
 488 // fields from interfering with contended fields/classes.
 489 void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
 490   assert(_blocks != nullptr, "Sanity check");
 491   assert(_blocks->offset() == 0, "first block must be at offset zero");
 492   LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
 493   LayoutRawBlock* b = _blocks;
 494   while (b->next_block() != nullptr) {
 495     if (b->next_block()->offset() > (b->offset() + b->size())) {
 496       int size = b->next_block()->offset() - (b->offset() + b->size());
 497       // FIXME it would be better if the initial empty block were tagged as PADDING for value classes
 498       LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
 499       empty->set_offset(b->offset() + b->size());
 500       empty->set_next_block(b->next_block());
 501       b->next_block()->set_prev_block(empty);
 502       b->set_next_block(empty);
 503       empty->set_prev_block(b);
 504     }
 505     b = b->next_block();
 506   }
 507   assert(b->next_block() == nullptr, "Invariant at this point");
 508   assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");

 509   // If the super class has @Contended annotation, a padding block is
 510   // inserted at the end to ensure that fields from the subclasses won't share
 511   // the cache line of the last field of the contended class
 512   if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
 513     LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 514     p->set_offset(b->offset() + b->size());
 515     b->set_next_block(p);
 516     p->set_prev_block(b);
 517     b = p;
 518   }
 519 
 520   LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
 521   last->set_offset(b->offset() + b->size());
 522   assert(last->offset() > 0, "Sanity check");
 523   b->set_next_block(last);
 524   last->set_prev_block(b);
 525   _last = last;
 526 }
 527 
 528 LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
 529   assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
 530   assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
 531   block->set_offset(slot->offset());
 532   slot->set_offset(slot->offset() + block->size());
 533   assert((slot->size() - block->size()) < slot->size(), "underflow checking");
 534   assert(slot->size() - block->size() >= 0, "no negative size allowed");
 535   slot->set_size(slot->size() - block->size());
 536   block->set_prev_block(slot->prev_block());
 537   block->set_next_block(slot);
 538   slot->set_prev_block(block);
 539   if (block->prev_block() != nullptr) {
 540     block->prev_block()->set_next_block(block);
 541   }
 542   if (_blocks == slot) {
 543     _blocks = block;
 544   }
 545   if (_start == slot) {
 546     _start = block;
 547   }
 548   return block;
 549 }
 550 
 551 void FieldLayout::remove(LayoutRawBlock* block) {
 552   assert(block != nullptr, "Sanity check");
 553   assert(block != _last, "Sanity check");
 554   if (_blocks == block) {
 555     _blocks = block->next_block();
 556     if (_blocks != nullptr) {
 557       _blocks->set_prev_block(nullptr);
 558     }
 559   } else {
 560     assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
 561     block->prev_block()->set_next_block(block->next_block());
 562     block->next_block()->set_prev_block(block->prev_block());
 563   }
 564   if (block == _start) {
 565     _start = block->prev_block();
 566   }
 567 }
 568 
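     // Shifts every block from the first field block onward by 'shift' bytes, absorbing the
     // shift into the preceding EMPTY block (or a new PADDING block), and updates the recorded
     // field offsets and null marker offsets accordingly.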
 569 void FieldLayout::shift_fields(int shift) {
 570   LayoutRawBlock* b = first_field_block();
 571   LayoutRawBlock* previous = b->prev_block();
 572   if (previous->block_kind() == LayoutRawBlock::EMPTY) {
 573     previous->set_size(previous->size() + shift);
 574   } else {
 575     LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
 576     nb->set_offset(b->offset());
 577     previous->set_next_block(nb);
 578     nb->set_prev_block(previous);
 579     b->set_prev_block(nb);
 580     nb->set_next_block(b);
 581   }
 582   while (b != nullptr) {
 583     b->set_offset(b->offset() + shift);
 584     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
 585       _field_info->adr_at(b->field_index())->set_offset(b->offset());
 586       if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
 587         int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
 588         _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
 589         _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
 590       }
 591     }
 592     assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
 593     b = b->next_block();
 594   }
 595 }
 596 
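     // Returns the layout's NULL_MARKER block; must only be called when a null marker exists.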
 597 LayoutRawBlock* FieldLayout::find_null_marker() {
 598   LayoutRawBlock* b = _blocks;
 599   while (b != nullptr) {
 600     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 601       return b;
 602     }
 603     b = b->next_block();
 604   }
 605   ShouldNotReachHere();
 606 }
 607 
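     // Removes the NULL_MARKER block from the layout, either by merging it into the following
     // EMPTY block or, when that is not possible, by retagging it as EMPTY.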
 608 void FieldLayout::remove_null_marker() {
 609   LayoutRawBlock* b = first_field_block();
 610   while (b != nullptr) {
 611     if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
 612       if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
 613         LayoutRawBlock* n = b->next_block();
 614         remove(b);
 615         n->set_offset(b->offset());
 616         n->set_size(n->size() + b->size());
 617       } else {
 618         b->set_block_kind(LayoutRawBlock::EMPTY);
 619       }
 620       return;
 621     }
 622     b = b->next_block();
 623   }
 624   ShouldNotReachHere(); // if we reach this point, the null marker was not found!
 625 }
 626 
 627 void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields) {
 628   ResourceMark rm;
 629   LayoutRawBlock* b = _blocks;
 630   while(b != _last) {
 631     switch(b->block_kind()) {
 632       case LayoutRawBlock::REGULAR: {
 633         FieldInfo* fi = _field_info->adr_at(b->field_index());
 634         output->print_cr(" @%d %s %d/%d \"%s\" %s",
 635                          b->offset(),
 636                          "REGULAR",

 637                          b->size(),
 638                          b->alignment(),
 639                          fi->name(_cp)->as_C_string(),
 640                          fi->signature(_cp)->as_C_string());
 641         break;
 642       }
 643       case LayoutRawBlock::FLAT: {
 644         FieldInfo* fi = _field_info->adr_at(b->field_index());
 645         InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
 646         assert(ik != nullptr, "");
 647         output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
 648                          b->offset(),
 649                          "FLAT",

 650                          b->size(),
 651                          b->alignment(),
 652                          fi->name(_cp)->as_C_string(),
 653                          fi->signature(_cp)->as_C_string(),
 654                          ik->name()->as_C_string(),
 655                          ik->class_loader_data(),
 656                          LayoutKindHelper::layout_kind_as_string(b->layout_kind()));
 657         break;
 658       }
 659       case LayoutRawBlock::RESERVED: {
 660         output->print_cr(" @%d %s %d/-",
 661                          b->offset(),
 662                          "RESERVED",
 663                          b->size());
 664         break;
 665       }
 666       case LayoutRawBlock::INHERITED: {
 667         assert(!is_static, "Static fields are not inherited in layouts");
 668         assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
 669         bool found = false;
 670         const InstanceKlass* ik = super;
 671         while (!found && ik != nullptr) {
 672           for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
 673             if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
 674               output->print_cr(" @%d %s %d/%d \"%s\" %s",
 675                   b->offset(),
 676                   "INHERITED",

 677                   b->size(),
 678                   b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
 679                   fs.name()->as_C_string(),
 680                   fs.signature()->as_C_string());
 681               found = true;
 682               break;
 683             }


 684           }
 685           ik = ik->super();
 686         }
 687         break;
 688       }
 689     case LayoutRawBlock::EMPTY:
 690       output->print_cr(" @%d %s %d/1",
 691                        b->offset(),
 692                       "EMPTY",
 693                        b->size());
 694       break;
 695     case LayoutRawBlock::PADDING:
 696       output->print_cr(" @%d %s %d/1",
 697                       b->offset(),
 698                       "PADDING",
 699                       b->size());
 700       break;
 701     case LayoutRawBlock::NULL_MARKER:
 702     {
 703       output->print_cr(" @%d %s %d/1 ",
 704                       b->offset(),
 705                       "NULL_MARKER",
 706                       b->size());
 707       break;
 708     }
 709     default:
 710       fatal("Unknown block type");
 711     }
 712     b = b->next_block();
 713   }
 714 }
 715 
 716 FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
 717                                        GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type, bool is_abstract_value,
 718                                        bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
 719   _classname(classname),
 720   _loader_data(loader_data),
 721   _super_klass(super_klass),
 722   _constant_pool(constant_pool),
 723   _field_info(field_info),
 724   _info(info),
 725   _inline_layout_info_array(inline_layout_info_array),
 726   _root_group(nullptr),
 727   _contended_groups(GrowableArray<FieldGroup*>(8)),
 728   _static_fields(nullptr),
 729   _layout(nullptr),
 730   _static_layout(nullptr),
 731   _nonstatic_oopmap_count(0),
 732   _payload_alignment(-1),
 733   _payload_offset(-1),
 734   _null_marker_offset(-1),
 735   _payload_size_in_bytes(-1),
 736   _non_atomic_layout_size_in_bytes(-1),
 737   _non_atomic_layout_alignment(-1),
 738   _atomic_layout_size_in_bytes(-1),
 739   _nullable_layout_size_in_bytes(-1),
 740   _fields_size_sum(0),
 741   _declared_non_static_fields_count(0),
 742   _has_non_naturally_atomic_fields(false),
 743   _is_naturally_atomic(false),
 744   _must_be_atomic(must_be_atomic),
 745   _has_nonstatic_fields(false),
 746   _has_inline_type_fields(false),
 747   _is_contended(is_contended),
 748   _is_inline_type(is_inline_type),
 749   _is_abstract_value(is_abstract_value),
 750   _has_flattening_information(is_inline_type),
 751   _is_empty_inline_class(false) {}
 752 
 753 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 754   assert(g > 0, "must only be called for named contended groups");
 755   FieldGroup* fg = nullptr;
 756   for (int i = 0; i < _contended_groups.length(); i++) {
 757     fg = _contended_groups.at(i);
 758     if (fg->contended_group() == g) return fg;
 759   }
 760   fg = new FieldGroup(g);
 761   _contended_groups.append(fg);
 762   return fg;
 763 }
 764 
 765 void FieldLayoutBuilder::prologue() {
 766   _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 767   const InstanceKlass* super_klass = _super_klass;
 768   _layout->initialize_instance_layout(super_klass, _super_ends_with_oop);
 769   _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
 770   if (super_klass != nullptr) {
 771     _has_nonstatic_fields = super_klass->has_nonstatic_fields();
 772   }
 773   _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
 774   _static_layout->initialize_static_layout();
 775   _static_fields = new FieldGroup();
 776   _root_group = new FieldGroup();
 777 }
 778 
 779 // Field sorting for regular (non-inline) classes:
 780 //   - fields are sorted into static and non-static fields
 781 //   - non-static fields are also sorted according to their contention group
 782 //     (support for the @Contended annotation)
 783 //   - the @Contended annotation is ignored for static fields
 784 //   - field flattening decisions are made in this method
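     // For example (hypothetical fields): with "static int a; @Contended int b; int c;",
     // a is added to _static_fields, b to a contended group, and c to _root_group.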
 785 void FieldLayoutBuilder::regular_field_sorting() {
 786   int idx = 0;
 787   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {

 788     FieldGroup* group = nullptr;
 789     FieldInfo fieldinfo = *it;
 790     if (fieldinfo.access_flags().is_static()) {
 791       group = _static_fields;
 792     } else {
 793       _has_nonstatic_fields = true;
 794       if (fieldinfo.field_flags().is_contended()) {
 795         int g = fieldinfo.contended_group();
 796         if (g == 0) {
 797           group = new FieldGroup(true);
 798           _contended_groups.append(group);
 799         } else {
 800           group = get_or_create_contended_group(g);
 801         }
 802       } else {
 803         group = _root_group;
 804       }
 805     }
 806     assert(group != nullptr, "invariant");
 807     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 808     switch(type) {
 809     case T_BYTE:
 810     case T_CHAR:
 811     case T_DOUBLE:
 812     case T_FLOAT:
 813     case T_INT:
 814     case T_LONG:
 815     case T_SHORT:
 816     case T_BOOLEAN:
 817       group->add_primitive_field(idx, type);
 818       break;
 819     case T_OBJECT:
 820     case T_ARRAY:
 821     {
 822       LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);
 823       if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
 824           || (!fieldinfo.field_flags().is_injected()
 825               && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
 826               && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
 827         _has_inline_type_fields = true;
 828         _has_flattening_information = true;
 829       }
 830       if (lk == LayoutKind::REFERENCE) {
 831         if (group != _static_fields) _nonstatic_oopmap_count++;
 832         group->add_oop_field(idx);
 833       } else {
 834         _has_flattening_information = true;
 835         InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
 836         int size, alignment;
 837         get_size_and_alignment(vk, lk, &size, &alignment);
 838         group->add_flat_field(idx, vk, lk, size, alignment);
 839         _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
 840         _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 841         _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
 842         _field_info->adr_at(idx)->set_layout_kind(lk);
 843         // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
 844       }
 845       break;
 846     }
 847     default:
 848       fatal("Unexpected BasicType");
 849     }
 850   }
 851   _root_group->sort_by_size();
 852   _static_fields->sort_by_size();
 853   if (!_contended_groups.is_empty()) {
 854     for (int i = 0; i < _contended_groups.length(); i++) {
 855       _contended_groups.at(i)->sort_by_size();
 856     }
 857   }
 858 }
 859 
 860 /* Field sorting for inline classes:
 861  *   - because inline classes are immutable, the @Contended annotation is ignored
 862  *     when computing their layout (with only read operations, there is no false
 863  *     sharing issue)
 864  *   - this method also records the alignment of the field with the most
 865  *     constraining alignment; this value is then used as the alignment
 866  *     constraint when flattening this inline type into another container
 867  *   - field flattening decisions are made in this method (those decisions are
 868  *     currently based only on the size of the fields to be flattened; the size
 869  *     of the resulting instance is not considered)
 870  */
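     // For illustration (hypothetical value class with a long field and a byte field): the
     // recorded payload alignment is 8, the alignment of the most constraining field (the long).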
 871 void FieldLayoutBuilder::inline_class_field_sorting() {
 872   assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
 873   int alignment = -1;
 874   int idx = 0;
 875   for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
 876     FieldGroup* group = nullptr;
 877     FieldInfo fieldinfo = *it;
 878     int field_alignment = 1;
 879     if (fieldinfo.access_flags().is_static()) {
 880       group = _static_fields;
 881     } else {
 882       _has_nonstatic_fields = true;
 883       _declared_non_static_fields_count++;
 884       group = _root_group;
 885     }
 886     assert(group != nullptr, "invariant");
 887     BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
 888     switch(type) {
 889     case T_BYTE:
 890     case T_CHAR:
 891     case T_DOUBLE:
 892     case T_FLOAT:
 893     case T_INT:
 894     case T_LONG:
 895     case T_SHORT:
 896     case T_BOOLEAN:
 897       if (group != _static_fields) {
 898         field_alignment = type2aelembytes(type); // alignment == size for primitive types
 899       }
 900       group->add_primitive_field(fieldinfo.index(), type);
 901       break;
 902     case T_OBJECT:
 903     case T_ARRAY:
 904     {
 905       bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
 906       LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);
 907       if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
 908           || (!fieldinfo.field_flags().is_injected()
 909               && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
 910               && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
 911         _has_inline_type_fields = true;
 912         _has_flattening_information = true;
 913       }
 914       if (lk == LayoutKind::REFERENCE) {
 915         if (group != _static_fields) {
 916           _nonstatic_oopmap_count++;
 917           field_alignment = type2aelembytes(type); // alignment == size for oops
 918         }
 919         group->add_oop_field(idx);
 920       } else {
 921         _has_flattening_information = true;
 922         InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
 923         if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
 924         int size, alignment;
 925         get_size_and_alignment(vk, lk, &size, &alignment);
 926         group->add_flat_field(idx, vk, lk, size, alignment);
 927         _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
 928         _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 929         field_alignment = alignment;
 930         _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
 931         _field_info->adr_at(idx)->set_layout_kind(lk);
 932       }
 933       break;
 934     }
 935     default:
 936       fatal("Unexpected BasicType");
 937     }
 938     if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
 939   }
 940   _payload_alignment = alignment;
 941   assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
 942 }
 943 
 944 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 945   if (ContendedPaddingWidth > 0) {
 946     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 947     _layout->insert(slot, padding);
 948   }
 949 }
 950 
 951 // Computation of the layout of regular classes is an evolution of the previous default layout
 952 // (FieldAllocationStyle 1):
 953 //   - primitive fields (both primitive types and flat inline types) are allocated
 954 //     first (from the biggest to the smallest)
 955 //   - oop fields are allocated, either in existing gaps or at the end of
 956 //     the layout. We allocate oops in a single block to have a single oop map entry.
 957 //   - if the super class ends with an oop, we lead with oops. That will cause the
 958 //     trailing oop map entry of the super class and the oop map entry of this class
 959 //     to be folded into a single entry later. Correspondingly, if the super class
 960 //     ends with a primitive field, we gain nothing by leading with oops; therefore
 961 //     we let oop fields trail, thus giving future derived classes the chance to apply
 962 //     the same trick.
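     // For illustration (hypothetical offsets): if the super class layout ends with an oop,
     // leading with this class's oops makes the two oop runs contiguous, so they can later be
     // folded into a single oop map entry.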
 963 void FieldLayoutBuilder::compute_regular_layout() {
 964   bool need_tail_padding = false;
 965   prologue();
 966   regular_field_sorting();

 967   if (_is_contended) {
 968     _layout->set_start(_layout->last_block());
 969     // insertion is currently easy because the current strategy doesn't try to fill holes
 970     // in super classes' layouts => as a consequence, the _start block is the _last_block
 971     insert_contended_padding(_layout->start());
 972     need_tail_padding = true;
 973   }
 974 
 975   if (_super_ends_with_oop) {
 976     _layout->add(_root_group->oop_fields());
 977     _layout->add(_root_group->big_primitive_fields());
 978     _layout->add(_root_group->small_primitive_fields());
 979   } else {
 980     _layout->add(_root_group->big_primitive_fields());
 981     _layout->add(_root_group->small_primitive_fields());
 982     _layout->add(_root_group->oop_fields());
 983   }
 984 
 985   if (!_contended_groups.is_empty()) {
 986     for (int i = 0; i < _contended_groups.length(); i++) {
 987       FieldGroup* cg = _contended_groups.at(i);
 988       LayoutRawBlock* start = _layout->last_block();
 989       insert_contended_padding(start);
 990       _layout->add(cg->big_primitive_fields());
 991       _layout->add(cg->small_primitive_fields(), start);
 992       _layout->add(cg->oop_fields(), start);
 993       need_tail_padding = true;
 994     }
 995   }
 996 
 997   if (need_tail_padding) {
 998     insert_contended_padding(_layout->last_block());
 999   }
1000 
1001   // Warning: InstanceMirrorKlass expects static oops to be allocated first
1002   _static_layout->add_contiguously(_static_fields->oop_fields());
1003   _static_layout->add(_static_fields->big_primitive_fields());
1004   _static_layout->add(_static_fields->small_primitive_fields());
1005 
1006   epilogue();
1007 }
1008 
1009 /* Computation of inline class layouts uses a slightly different strategy than for
1010  * regular classes. Regular classes have their oop fields allocated at the end
1011  * of the layout to improve GC performance. Unfortunately, this strategy
1012  * increases the number of empty slots inside an instance. Because the purpose
1013  * of inline classes is to be embedded into other containers, it is critical
1014  * to keep their size as small as possible. For this reason, the allocation
1015  * strategy is:
1016  *   - big primitive fields (primitive types and flat inline types smaller
1017  *     than an oop) are allocated first (from the biggest to the smallest)
1018  *   - then oop fields
1019  *   - then small primitive fields (from the biggest to the smallest)
1020  */
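     // For illustration (hypothetical value class with a long, an Object reference and a short):
     // the long is laid out first, then the reference, then the short, which keeps the payload
     // tightly packed in this example.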
1021 void FieldLayoutBuilder::compute_inline_class_layout() {
1022 
1023   // Test if the concrete inline class is an empty class (no instance fields)
1024   // and insert a dummy field if needed
1025   if (!_is_abstract_value) {
1026     bool declares_non_static_fields = false;
1027     for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
1028       FieldInfo fieldinfo = *it;
1029       if (!fieldinfo.access_flags().is_static()) {
1030         declares_non_static_fields = true;
1031         break;
1032       }
1033     }
1034     if (!declares_non_static_fields) {
1035       bool has_inherited_fields = false;
1036       const InstanceKlass* super = _super_klass;
1037       while(super != nullptr) {
1038         if (super->has_nonstatic_fields()) {
1039           has_inherited_fields = true;
1040           break;
1041         }
1042         super = super->super() == nullptr ? nullptr : InstanceKlass::cast(super->super());
1043       }
1044 
1045       if (!has_inherited_fields) {
1046         // Inject ".empty" dummy field
1047         _is_empty_inline_class = true;
1048         FieldInfo::FieldFlags fflags(0);
1049         fflags.update_injected(true);
1050         AccessFlags aflags;
1051         FieldInfo fi(aflags,
1052                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
1053                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
1054                     0,
1055                     fflags);
1056         int idx = _field_info->append(fi);
1057         _field_info->adr_at(idx)->set_index(idx);
1058       }
1059     }
1060   }
1061 
1062   prologue();
1063   inline_class_field_sorting();
1064 
1065   assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");
1066 
1067   if (_layout->super_has_fields() && !_is_abstract_value) {  // non-static field layout
1068     if (!_has_nonstatic_fields) {
1069       assert(_is_abstract_value, "Concrete value types have at least one field");
1070       // Nothing to do
1071     } else {
1072       // decide which alignment to use, then set first allowed field offset
1073 
1074       assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
1075       assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");
1076 
1077       if (_payload_alignment < _layout->super_alignment()) {
1078         int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
1079         assert(new_alignment % _payload_alignment == 0, "Must be");
1080         assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
1081         _payload_alignment = new_alignment;
1082       }
1083       _layout->set_start(_layout->first_field_block());
1084     }
1085   } else {
1086     if (_is_abstract_value && _has_nonstatic_fields) {
1087       _payload_alignment = type2aelembytes(BasicType::T_LONG);
1088     }
1089     assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
1090     LayoutRawBlock* first_empty = _layout->start()->next_block();
1091     if (first_empty->offset() % _payload_alignment != 0) {
1092       LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
1093       _layout->insert(first_empty, padding);
1094       if (first_empty->size() == 0) {
1095         _layout->remove(first_empty);
1096       }
1097       _layout->set_start(padding);
1098     }
1099   }
1100 
1101   _layout->add(_root_group->big_primitive_fields());
1102   _layout->add(_root_group->oop_fields());
1103   _layout->add(_root_group->small_primitive_fields());
1104 
1105   LayoutRawBlock* first_field = _layout->first_field_block();
1106   if (first_field != nullptr) {
1107     _payload_offset = _layout->first_field_block()->offset();
1108     _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1109   } else {
1110     assert(_is_abstract_value, "Concrete inline types must have at least one field");
1111     _payload_offset = _layout->blocks()->size();
1112     _payload_size_in_bytes = 0;
1113   }
1114 
1115   // Determining if the value class is naturally atomic:
1116   if ((!_layout->super_has_fields() && _declared_non_static_fields_count <= 1 && !_has_non_naturally_atomic_fields)
1117       || (_layout->super_has_fields() && _super_klass->is_naturally_atomic() && _declared_non_static_fields_count == 0)) {
1118         _is_naturally_atomic = true;
1119   }
1120 
1121   // At this point, the characteristics of the raw layout (used in standalone instances) are known.
1122   // From this, additional layouts will be computed: the atomic and nullable layouts.
1123   // Once those additional layouts are computed, the raw layout might need some adjustments.
1124 
1125   bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;
1126 
1127   if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
1128     // Validation of the non atomic layout
1129     if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
1130       _non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
1131       _non_atomic_layout_alignment = _payload_alignment;
1132     }
1133 
1134     // Next step is to compute the characteristics for a layout enabling atomic updates
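         // For illustration: a 6-byte payload is rounded up to round_up_power_of_2(6) == 8, and
         // the atomic layout is kept only if that size does not exceed MAX_ATOMIC_OP_SIZE.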
1135     if (UseAtomicValueFlattening) {
1136       int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
1137       if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
1138         _atomic_layout_size_in_bytes = atomic_size;
1139       }
1140     }
1141 
1142     // Next step is the nullable layout: the layout must include a null marker and must also be atomic
1143     if (UseNullableValueFlattening) {
1144       // Look for an empty slot inside the layout that could be used to store a null marker
1145       // FIXME: could it be possible to re-use the .empty field as a null marker for empty values?
1146       LayoutRawBlock* b = _layout->first_field_block();
1147       assert(b != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1148       int null_marker_offset = -1;
1149       if (_is_empty_inline_class) {
1150         // Reusing the dummy field as a null marker
1151         assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
1152         null_marker_offset = b->offset();
1153       } else {
1154         while (b != _layout->last_block()) {
1155           if (b->block_kind() == LayoutRawBlock::EMPTY) {
1156             break;
1157           }
1158           b = b->next_block();
1159         }
1160         if (b != _layout->last_block()) {
1161           // found an empty slot, register its offset from the beginning of the payload
1162           null_marker_offset = b->offset();
1163           LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1164           _layout->add_field_at_offset(marker, b->offset());
1165         }
1166         if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
1167           int last_offset = _layout->last_block()->offset();
1168           LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
1169           _layout->insert_field_block(_layout->last_block(), marker);
1170           assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
1171           null_marker_offset = marker->offset();
1172         }
1173       }
1174 
1175       // Now that the null marker is there, the size of the nullable layout must be computed (remember, it must be atomic too)
1176       int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
1177       int nullable_size = round_up_power_of_2(new_raw_size);
1178       if (nullable_size <= (int)MAX_ATOMIC_OP_SIZE) {
1179         _nullable_layout_size_in_bytes = nullable_size;
1180         _null_marker_offset = null_marker_offset;
1181       } else {
1182         // If the nullable layout is rejected, the NULL_MARKER block should be removed
1183         // from the layout, otherwise it will appear anyway if the layout is printed
1184         if (!_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
1185           _layout->remove_null_marker();
1186         }
1187         _null_marker_offset = -1;
1188       }
1189     }
1190     // If the inline class has an atomic or nullable (which is also atomic) layout,
1191     // we want the raw layout to have the same alignment as those atomic layouts so access code
1192     // can remain simple (a single instruction without an intermediate copy). This might require
1193     // shifting all fields in the raw layout, but this operation is possible only if the class
1194     // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
1195     // field shift is needed but not possible, all atomic layouts are disabled and only the
1196     // reference and loosely consistent layouts are supported.
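         // For illustration (hypothetical offsets): a payload starting at offset 4 with an 8-byte
         // atomic layout gives shift == 4, so, if there are no inherited fields, all fields are
         // shifted up to offset 8.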
1197     int required_alignment = _payload_alignment;
1198     if (has_atomic_layout() && _payload_alignment < atomic_layout_size_in_bytes()) {
1199       required_alignment = atomic_layout_size_in_bytes();
1200     }
1201     if (has_nullable_atomic_layout() && _payload_alignment < nullable_layout_size_in_bytes()) {
1202       required_alignment = nullable_layout_size_in_bytes();
1203     }
1204     int shift = first_field->offset() % required_alignment;
1205     if (shift != 0) {
1206       if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
1207         assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possibly dummy) field");
1208         _layout->shift_fields(shift);
1209         _payload_offset = _layout->first_field_block()->offset();
1210         if (has_nullable_atomic_layout()) {
1211           assert(!_is_empty_inline_class, "Should not get here with empty values");
1212           _null_marker_offset = _layout->find_null_marker()->offset();
1213         }
1214         _payload_alignment = required_alignment;
1215       } else {
1216         _atomic_layout_size_in_bytes = -1;
1217         if (has_nullable_atomic_layout() && !_is_empty_inline_class) {  // empty values don't have a dedicated NULL_MARKER block
1218           _layout->remove_null_marker();
1219         }
1220         _nullable_layout_size_in_bytes = -1;
1221         _null_marker_offset = -1;
1222       }
1223     } else {
1224       _payload_alignment = required_alignment;
1225     }
1226 
1227     // If the inline class has a nullable layout, the layout used in heap allocated standalone
1228     // instances must also be the nullable layout, in order to be able to set the null marker to
1229     // non-null before copying the payload to other containers.
1230     if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_layout_size_in_bytes()) {
1231       _payload_size_in_bytes = nullable_layout_size_in_bytes();
1232     }
1233   }
1234   // Warning: InstanceMirrorKlass expects static oops to be allocated first
1235   _static_layout->add_contiguously(_static_fields->oop_fields());
1236   _static_layout->add(_static_fields->big_primitive_fields());
1237   _static_layout->add(_static_fields->small_primitive_fields());
1238 
1239   if (UseAltSubstitutabilityMethod) {
1240     generate_acmp_maps();
1241   }
1242   epilogue();
1243 }
1244 
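     // Adds the oop map entries of a flat field by copying the oop maps of its InlineKlass,
     // rebased from the value class payload offset to the field's offset in this layout.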
1245 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1246                 InlineKlass* vklass, int offset) {
1247   int diff = offset - vklass->payload_offset();
1248   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1249   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1250   while (map < last_map) {
1251     nonstatic_oop_maps->add(map->offset() + diff, map->count());
1252     map++;
1253   }
1254 }
1255 
1256 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1257   if (list == nullptr) return;
1258   for (int i = 0; i < list->length(); i++) {
1259     LayoutRawBlock* f = list->at(i);
1260     if (f->block_kind() == LayoutRawBlock::FLAT) {
1261       InlineKlass* vk = f->inline_klass();
1262       assert(vk != nullptr, "Should have been initialized");
1263       if (vk->contains_oops()) {
1264         add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1265       }
1266     }
1267   }
1268 }
1269 
1270 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1271   if (group->oop_fields() != nullptr) {
1272     for (int i = 0; i < group->oop_fields()->length(); i++) {
1273       LayoutRawBlock* b = group->oop_fields()->at(i);
1274       nonstatic_oop_maps->add(b->offset(), 1);
1275     }
1276   }
1277   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1278   register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1279 }
1280 
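     // Inserts the <offset, size> segment into the acmp map (sorted by offset), coalescing it
     // with an existing segment when it starts exactly where that segment ends. 'last_idx' is a
     // hint pointing at the previous insertion; an updated hint is returned for the next call.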
1281 static int insert_segment(GrowableArray<Pair<int,int>>* map, int offset, int size, int last_idx) {
1282   if (map->is_empty()) {
1283     return map->append(Pair<int,int>(offset, size));
1284   }
1285   last_idx = last_idx == -1 ? 0 : last_idx;
1286   int start = map->adr_at(last_idx)->first > offset ? 0 : last_idx;
1287   bool inserted = false;
1288   for (int c = start; c < map->length(); c++) {
1289     if (offset == (map->adr_at(c)->first + map->adr_at(c)->second)) {
1290       // the new segment is contiguous with this one, so they can be coalesced
1291       map->adr_at(c)->second = map->adr_at(c)->second + size;
1292       inserted = true;
1293       break;  // break out of the for loop
1294     }
1295     if (offset < (map->adr_at(c)->first)) {
1296       map->insert_before(c, Pair<int,int>(offset, size));
1297       last_idx = c;
1298       inserted = true;
1299       break;  // break out of the for loop
1300     }
1301   }
1302   if (!inserted) {
1303     last_idx = map->append(Pair<int,int>(offset, size));
1304   }
1305   return last_idx;
1306 }
1307 
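     // Merges the acmp maps previously computed for 'ik' (read from the typeArray stored in its
     // mirror at acmp_maps_offset) into the maps being built, rebasing every entry by
     // (offset - payload_offset).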
1308 static int insert_map_at_offset(GrowableArray<Pair<int,int>>* nonoop_map, GrowableArray<int>* oop_map,
1309                                 const InstanceKlass* ik, int offset, int payload_offset, int last_idx) {
1310   oop mirror = ik->java_mirror();
1311   oop array = mirror->obj_field(ik->acmp_maps_offset());
1312   assert(array != nullptr, "Sanity check");
1313   typeArrayOop fmap = (typeArrayOop)array;
1314   typeArrayHandle fmap_h(Thread::current(), fmap);
1315   int nb_nonoop_field = fmap_h->int_at(0);
1316   int field_offset = offset - payload_offset;
1317   for (int i = 0; i < nb_nonoop_field; i++) {
1318     last_idx = insert_segment(nonoop_map,
1319                               field_offset + fmap_h->int_at( i * 2 + 1),
1320                               fmap_h->int_at( i * 2 + 2), last_idx);
1321   }
1322   int len = fmap_h->length();
1323   for (int i = nb_nonoop_field * 2 + 1; i < len; i++) {
1324       oop_map->append(field_offset + fmap_h->int_at(i));
1325   }
1326   return last_idx;
1327 }
1328 
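     // Splits the segment at index 'idx' into a leading part of 'head' bytes followed by the
     // remainder; does nothing if the segment is not larger than 'head'.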
1329 static void split_after(GrowableArray<Pair<int,int>>* map, int idx, int head) {
1330   int offset = map->adr_at(idx)->first;
1331   int size = map->adr_at(idx)->second;
1332   if (size <= head) return;
1333   map->adr_at(idx)->first = offset + head;
1334   map->adr_at(idx)->second = size - head;
1335   map->insert_before(idx, Pair<int,int>(offset, head));
1336 
1337 }
1338 
1339 void FieldLayoutBuilder::generate_acmp_maps() {
1340   assert(_is_inline_type || _is_abstract_value, "Must be done only for value classes (abstract or not)");
1341 
1342   // create/initialize current class' maps
1343   // The Pair<int,int> values in the nonoop_acmp_map represent <offset,size> segments of memory
1344   _nonoop_acmp_map = new GrowableArray<Pair<int,int>>();
1345   _oop_acmp_map = new GrowableArray<int>();
1346   if (_is_empty_inline_class) return;
1347   // last_idx remembers the position of the last insertion in order to speed up the next insertion.
1348   // Local fields are processed in ascending offset order, so an insertion is very likely to be performed
1349   // next to the previous insertion. However, in some cases local fields and inherited fields can be
1350   // interleaved, in which case the search for the insertion position cannot depend on the previous insertion.
1351   int last_idx = -1;
1352   if (_super_klass != nullptr && _super_klass != vmClasses::Object_klass()) {  // Assumes j.l.Object cannot have fields
1353     last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, _super_klass, 0, 0, last_idx);
1354   }
1355 
1356   // Processing local fields
1357   LayoutRawBlock* b = _layout->blocks();
1358   while(b != _layout->last_block()) {
1359     switch(b->block_kind()) {
1360       case LayoutRawBlock::RESERVED:
1361       case LayoutRawBlock::EMPTY:
1362       case LayoutRawBlock::PADDING:
1363       case LayoutRawBlock::NULL_MARKER:
1364       case LayoutRawBlock::INHERITED: // inherited fields are handled during maps creation/initialization
1365         // skip
1366         break;
1367 
1368       case LayoutRawBlock::REGULAR:
1369         {
1370           FieldInfo* fi = _field_info->adr_at(b->field_index());
1371           if (fi->signature(_constant_pool)->starts_with("L") || fi->signature(_constant_pool)->starts_with("[")) {
1372             _oop_acmp_map->append(b->offset());
1373           } else {
1374             // Non-oop case
1375             last_idx = insert_segment(_nonoop_acmp_map, b->offset(), b->size(), last_idx);
1376           }
1377           break;
1378        }
1379       case LayoutRawBlock::FLAT:
1380         {
1381           InlineKlass* vk = b->inline_klass();
1382           last_idx = insert_map_at_offset(_nonoop_acmp_map, _oop_acmp_map, vk, b->offset(), vk->payload_offset(), last_idx);
1383           if (LayoutKindHelper::is_nullable_flat(b->layout_kind())) {
1384             int null_marker_offset = b->offset() + vk->null_marker_offset_in_payload();
1385             last_idx = insert_segment(_nonoop_acmp_map, null_marker_offset, 1, last_idx);
1386             // Important note: the implementation assumes that for nullable flat fields, if the
1387             // null marker is zero (field is null), then all the fields of the flat field are also
1388           // zeroed. So, nullable flat fields are not encoded differently from null-free flat fields:
1389           // all fields are included in the map, plus the null marker.
1390           // If that assumption turns out to be wrong, then nullable flat fields would
1391           // require a dedicated section in the acmp map and would be handled differently: null_marker
1392           // comparison first, and if the null markers are identical and non-zero, then conditional
1393           // comparison of the other fields.
1394           }
1395         }
1396         break;
1397 
1398     }
1399     b = b->next_block();
1400   }
1401 
1402   // split segments into well-aligned blocks
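       // For illustration: a segment <2,12> (offset 2, size 12) is split into <2,2>, <4,4> and <8,6>.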
1403   int idx = 0;
1404   while (idx < _nonoop_acmp_map->length()) {
1405     int offset = _nonoop_acmp_map->adr_at(idx)->first;
1406     int size = _nonoop_acmp_map->adr_at(idx)->second;
1407     int mod = offset % 8;
1408     switch (mod) {
1409       case 0:
1410         break;
1411       case 4:
1412         split_after(_nonoop_acmp_map, idx, 4);
1413         break;
1414       case 2:
1415       case 6:
1416         split_after(_nonoop_acmp_map, idx, 2);
1417         break;
1418       case 1:
1419       case 3:
1420       case 5:
1421       case 7:
1422         split_after(_nonoop_acmp_map, idx, 1);
1423         break;
1424       default:
1425         ShouldNotReachHere();
1426     }
1427     idx++;
1428   }
1429 }
1430 
1431 void FieldLayoutBuilder::epilogue() {
1432   // Computing oopmaps
1433   OopMapBlocksBuilder* nonstatic_oop_maps =
1434       new OopMapBlocksBuilder(_nonstatic_oopmap_count);
1435   int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
1436   if (super_oop_map_count > 0) {
1437     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
1438     _super_klass->nonstatic_oop_map_count());
1439   }
1440   register_embedded_oops(nonstatic_oop_maps, _root_group);
1441   if (!_contended_groups.is_empty()) {
1442     for (int i = 0; i < _contended_groups.length(); i++) {
1443       FieldGroup* cg = _contended_groups.at(i);
1444       if (cg->oop_count() > 0) {
1445         assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
1446         register_embedded_oops(nonstatic_oop_maps, cg);
1447       }
1448     }
1449   }

1450   nonstatic_oop_maps->compact();
1451 
1452   int instance_end = align_up(_layout->last_block()->offset(), wordSize);
1453   int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
1454   int static_fields_size = (static_fields_end -
1455       InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
1456   int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);
1457 
1458   // Pass back information needed for InstanceKlass creation
1459 
1460   _info->oop_map_blocks = nonstatic_oop_maps;
1461   _info->_instance_size = align_object_size(instance_end / wordSize);
1462   _info->_static_field_size = static_fields_size;
1463   _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
1464   _info->_has_nonstatic_fields = _has_nonstatic_fields;
1465   _info->_has_inline_fields = _has_inline_type_fields;
1466   _info->_is_naturally_atomic = _is_naturally_atomic;
1467   if (_is_inline_type) {
1468     _info->_must_be_atomic = _must_be_atomic;
1469     _info->_payload_alignment = _payload_alignment;
1470     _info->_payload_offset = _payload_offset;
1471     _info->_payload_size_in_bytes = _payload_size_in_bytes;
1472     _info->_non_atomic_size_in_bytes = _non_atomic_layout_size_in_bytes;
1473     _info->_non_atomic_alignment = _non_atomic_layout_alignment;
1474     _info->_atomic_layout_size_in_bytes = _atomic_layout_size_in_bytes;
1475     _info->_nullable_layout_size_in_bytes = _nullable_layout_size_in_bytes;
1476     _info->_null_marker_offset = _null_marker_offset;
1477     _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
1478     _info->_is_empty_inline_klass = _is_empty_inline_class;
1479   }
1480 
1481   // Acmp maps are needed for both concrete and abstract value classes
1482   if (UseAltSubstitutabilityMethod && (_is_inline_type || _is_abstract_value)) {
1483     _info->_acmp_maps_offset = _static_layout->acmp_maps_offset();
1484     _info->_nonoop_acmp_map = _nonoop_acmp_map;
1485     _info->_oop_acmp_map = _oop_acmp_map;
1486   }
1487 
1488   // This may be too restrictive, since if all the fields fit in 64
1489   // bits we could make the decision to align instances of this class
1490   // to 64-bit boundaries, and load and store them as single words.
1491   // And on machines that support larger atomics we could similarly
1492   // allow larger values to be atomic, if properly aligned.
1493 
1494 #ifdef ASSERT
1495   // Tests verifying the integrity of field layouts use the output of -XX:+PrintFieldLayout
1496   // which prints the details of LayoutRawBlocks used to compute the layout.
1497   // The code below checks that offsets in the _field_info meta-data match offsets
1498   // in the LayoutRawBlocks
1499   LayoutRawBlock* b = _layout->blocks();
1500   while(b != _layout->last_block()) {
1501     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1502       if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
1503         tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
1504       }
1505       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1506     }
1507     b = b->next_block();
1508   }
1509   b = _static_layout->blocks();
1510   while(b != _static_layout->last_block()) {
1511     if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
1512       assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
1513     }
1514     b = b->next_block();
1515   }
1516 #endif // ASSERT
1517 
1518   static bool first_layout_print = true;
1519 
1520 
1521   if (PrintFieldLayout || (PrintInlineLayout && (_has_flattening_information || _is_abstract_value))) {
1522     ResourceMark rm;
1523     stringStream st;
1524     if (first_layout_print) {
1525       st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
1526       st.print_cr("Heap oop size = %d", heapOopSize);
1527       first_layout_print = false;
1528     }
1529     if (_super_klass != nullptr) {
1530       st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
1531                     _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
1532     } else {
1533       st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
1534     }
1535     st.print_cr("Instance fields:");
1536     _layout->print(&st, false, _super_klass, _inline_layout_info_array);
1537     st.print_cr("Static fields:");
1538     _static_layout->print(&st, true, nullptr, _inline_layout_info_array);
1539     st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
1540     if (_is_inline_type) {
1541       st.print_cr("First field offset = %d", _payload_offset);
1542       st.print_cr("%s layout: %d/%d", LayoutKindHelper::layout_kind_as_string(LayoutKind::BUFFERED),
1543                   _payload_size_in_bytes, _payload_alignment);
1544       if (has_non_atomic_flat_layout()) {
1545         st.print_cr("%s layout: %d/%d",
1546                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT),
1547                     _non_atomic_layout_size_in_bytes, _non_atomic_layout_alignment);
1548       } else {
1549         st.print_cr("%s layout: -/-",
1550                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_NON_ATOMIC_FLAT));
1551       }
1552       if (has_atomic_layout()) {
1553         st.print_cr("%s layout: %d/%d",
1554                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT),
1555                     _atomic_layout_size_in_bytes, _atomic_layout_size_in_bytes);
1556       } else {
1557         st.print_cr("%s layout: -/-",
1558                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULL_FREE_ATOMIC_FLAT));
1559       }
1560       if (has_nullable_atomic_layout()) {
1561         st.print_cr("%s layout: %d/%d",
1562                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT),
1563                     _nullable_layout_size_in_bytes, _nullable_layout_size_in_bytes);
1564       } else {
1565         st.print_cr("%s layout: -/-",
1566                     LayoutKindHelper::layout_kind_as_string(LayoutKind::NULLABLE_ATOMIC_FLAT));
1567       }
1568       if (_null_marker_offset != -1) {
1569         st.print_cr("Null marker offset = %d", _null_marker_offset);
1570       }
1571       if (UseAltSubstitutabilityMethod) {
1572         st.print("Non-oop acmp map: ");
1573         for (int i = 0 ; i < _nonoop_acmp_map->length(); i++) {
1574           st.print("<%d,%d>, ", _nonoop_acmp_map->at(i).first,  _nonoop_acmp_map->at(i).second);
1575         }
1576         st.print_cr("");
1577         st.print("oop acmp map: ");
1578         for (int i = 0 ; i < _oop_acmp_map->length(); i++) {
1579           st.print("%d, ", _oop_acmp_map->at(i));
1580         }
1581         st.print_cr("");
1582       }
1583     }
1584     st.print_cr("---");
1585     // Print output all together.
1586     tty->print_raw(st.as_string());
1587   }
1588 }
1589 
1590 void FieldLayoutBuilder::build_layout() {
1591   if (_is_inline_type || _is_abstract_value) {
1592     compute_inline_class_layout();
1593   } else {
1594     compute_regular_layout();
1595   }
1596 }