1 /*
   2  * Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "classfile/classFileParser.hpp"
  28 #include "classfile/fieldLayoutBuilder.hpp"
  29 #include "classfile/systemDictionary.hpp"
  30 #include "classfile/vmSymbols.hpp"
  31 #include "memory/resourceArea.hpp"
  32 #include "oops/array.hpp"
  33 #include "oops/fieldStreams.inline.hpp"
  34 #include "oops/instanceMirrorKlass.hpp"
  35 #include "oops/instanceKlass.inline.hpp"
  36 #include "oops/klass.inline.hpp"
  37 #include "oops/inlineKlass.inline.hpp"
  38 #include "runtime/fieldDescriptor.inline.hpp"
  39 
// Constructor for blocks that are not backed by a declared field of the class
// being laid out: EMPTY filler space, RESERVED header space, PADDING, and
// INHERITED placeholders. The offset is assigned later, when the block is
// placed into a layout.
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(NULL),
  _prev_block(NULL),
  _inline_klass(NULL),
  _kind(kind),
  _offset(-1),       // not placed in a layout yet
  _alignment(1),     // no alignment constraint for non-field blocks
  _size(size),
  _field_index(-1),  // no field of the current class backs this block
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
  54 
  55 
  56 LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  57  _next_block(NULL),
  58  _prev_block(NULL),
  59  _inline_klass(NULL),
  60  _kind(kind),
  61  _offset(-1),
  62  _alignment(alignment),
  63  _size(size),
  64  _field_index(index),
  65  _is_reference(is_reference) {
  66   assert(kind == REGULAR || kind == INLINED || kind == INHERITED,
  67          "Other kind do not have a field index");
  68   assert(size > 0, "Sanity check");
  69   assert(alignment > 0, "Sanity check");
  70 }
  71 
  72 bool LayoutRawBlock::fit(int size, int alignment) {
  73   int adjustment = 0;
  74   if ((_offset % alignment) != 0) {
  75     adjustment = alignment - (_offset % alignment);
  76   }
  77   return _size >= size + adjustment;
  78 }
  79 
// A FieldGroup gathers fields that must be laid out together: either the
// default group, the static group, or one @Contended group. All field lists
// are allocated lazily, on first insertion.
FieldGroup::FieldGroup(int contended_group) :
  _next(NULL),
  _small_primitive_fields(NULL),   // blocks smaller than a heap oop
  _big_primitive_fields(NULL),     // blocks at least as big as a heap oop
  _oop_fields(NULL),
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
  87 
  88 void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
  89   int size = type2aelembytes(type);
  90   LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
  91   if (size >= oopSize) {
  92     add_to_big_primitive_list(block);
  93   } else {
  94     add_to_small_primitive_list(block);
  95   }
  96 }
  97 
  98 void FieldGroup::add_oop_field(AllFieldStream fs) {
  99   int size = type2aelembytes(T_OBJECT);
 100   LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
 101   if (_oop_fields == NULL) {
 102     _oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 103   }
 104   _oop_fields->append(block);
 105   _oop_count++;
 106 }
 107 
 108 void FieldGroup::add_inlined_field(AllFieldStream fs, InlineKlass* vk) {
 109   LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INLINED, vk->get_exact_size_in_bytes(), vk->get_alignment(), false);
 110   block->set_inline_klass(vk);
 111   if (block->size() >= oopSize) {
 112     add_to_big_primitive_list(block);
 113   } else {
 114     add_to_small_primitive_list(block);
 115   }
 116 }
 117 
 118 void FieldGroup::sort_by_size() {
 119   if (_small_primitive_fields != NULL) {
 120     _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 121   }
 122   if (_big_primitive_fields != NULL) {
 123     _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
 124   }
 125 }
 126 
 127 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
 128   if (_small_primitive_fields == NULL) {
 129     _small_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 130   }
 131   _small_primitive_fields->append(block);
 132 }
 133 
 134 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
 135   if (_big_primitive_fields == NULL) {
 136     _big_primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
 137   }
 138   _big_primitive_fields->append(block);
 139 }
 140 
// A FieldLayout is a doubly-linked list of LayoutRawBlocks ordered by offset:
// _blocks is the head, _last the trailing (unbounded EMPTY) block, and _start
// the first block where new fields may be allocated. All three stay NULL
// until one of the initialize_*_layout() methods is called.
FieldLayout::FieldLayout(Array<u2>* fields, ConstantPool* cp) :
  _fields(fields),
  _cp(cp),
  _blocks(NULL),
  _start(_blocks),   // _blocks is already NULL here, so _start/_last start out NULL
  _last(_blocks) {}
 147 
// Prepares the layout used for static fields: a single unbounded EMPTY block,
// preceded (when its size is known) by a RESERVED block covering the
// java.lang.Class instance that holds the statics.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
 162 
// Prepares the layout used for instance fields. Without a super class, the
// layout starts as an unbounded EMPTY block with a RESERVED block covering
// the object header. Otherwise the super classes' layout is reconstructed,
// and _start is positioned depending on whether this class is allowed to
// fill empty slots left in its super classes.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
  if (super_klass == NULL) {
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool has_fields = reconstruct_layout(super_klass);
    fill_holes(super_klass);
    if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
      _start = _blocks; // Setting _start to _blocks instead of _last would allow subclasses
      // to allocate fields in empty slots of their super classes
    } else {
      _start = _last;    // append fields at the end of the reconstructed layout
    }
  }
}
 181 
 182 LayoutRawBlock* FieldLayout::first_field_block() {
 183   LayoutRawBlock* block = _blocks;
 184   while (block != NULL
 185          && block->kind() != LayoutRawBlock::INHERITED
 186          && block->kind() != LayoutRawBlock::REGULAR
 187          && block->kind() != LayoutRawBlock::INLINED) {
 188     block = block->next_block();
 189   }
 190   return block;
 191 }
 192 
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;
  if (start == NULL) start = this->_start;
  bool last_search_success = false;
  int last_size = 0;        // size of the previously inserted field
  int last_alignment = 0;   // alignment of the previously inserted field
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = NULL;
    LayoutRawBlock* candidate = NULL;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else  if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != NULL, "Sanity check");
      last_search_success = true;

      // Walk backwards from the end toward "start", keeping the smallest
      // fitting EMPTY block found so far (best-fit strategy).
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == NULL || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == NULL) {
        // No fitting slot: append at the end (the trailing block is always EMPTY)
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != NULL, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
 245 
// Used for classes with hard coded field offsets: insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != NULL, "Sanity check");
  block->set_offset(offset);
  if (start == NULL) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != NULL) {
    // Matching slot: the block covering the requested offset, or the trailing block
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      // NOTE(review): this bound mixes slot size with an absolute offset —
      // presumably meant slot->offset() + slot->size() >= block->offset() + block->size(); confirm
      assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Carve an EMPTY adjustment block out of the slot to reach the requested offset
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The field consumed the whole slot
        remove(slot);
      }
      FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
 275 
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;
  if (start == NULL) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = NULL;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    // Walk backwards from the end looking for an EMPTY slot big enough for
    // the whole set; fall back to appending at the end if none is found
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != NULL, "Candidate must not be null");
    assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert the fields one after the other; each insertion shrinks "candidate"
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
 315 
// Inserts the field "block" into the EMPTY block "slot", prepending an EMPTY
// padding block if the slot's offset does not satisfy the field's alignment.
// The resulting offset is recorded in the FieldInfo array.
// Returns the inserted block.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  insert(slot, block);
  if (slot->size() == 0) {
    // The field filled the slot completely
    remove(slot);
  }
  FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
  return block;
}
 330 
// Rebuilds the layout of all instance fields inherited from "ik" and its
// super classes as a chain of INHERITED blocks sorted by increasing offset,
// headed by a RESERVED block covering the object header.
// Returns true if at least one instance field was found in the hierarchy.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  while (ik != NULL) {
    for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // Only instance fields contribute to the instance layout; skip statics
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      LayoutRawBlock* block;
      if (type == T_INLINE_TYPE) {
        // Inline type field: size and alignment come from the inline klass
        InlineKlass* vk = InlineKlass::cast(ik->get_inline_type_field_klass(fs.index()));
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, vk->get_exact_size_in_bytes(),
                                   vk->get_alignment(), false);

      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == NULL ? NULL : InstanceKlass::cast(ik->super());
  }
  // Chain the blocks by increasing offset behind the header RESERVED block
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
 369 
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != NULL, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != NULL) {
    // Any gap between consecutive blocks becomes an EMPTY (or PADDING) block
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == NULL, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }
  if (!UseEmptySlotsInSupers) {
    // Add an empty slots to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }
  // Terminate the layout with the unbounded EMPTY block where new fields
  // will be appended
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
 425 
// Links "block" into the layout immediately before the EMPTY block "slot",
// shrinking "slot" by the inserted size. The caller must have ensured that
// slot's offset satisfies block's alignment (see insert_field_block()).
// Returns the inserted block, now placed at slot's former offset.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != NULL) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    // block becomes the new head of the list
    _blocks = block;
  }
  return block;
}
 445 
// Unlinks "block" from the layout list. The trailing block (_last) can never
// be removed; _blocks and _start are updated if they pointed to "block".
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != NULL, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    _blocks = block->next_block();
    if (_blocks != NULL) {
      _blocks->set_prev_block(NULL);
    }
  } else {
    assert(block->prev_block() != NULL, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}
 463 
// Prints the layout, one line per block, for tracing/debugging. For INHERITED
// blocks, the super class hierarchy is scanned to recover the name and
// signature of the field declared at the matching offset.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
    case LayoutRawBlock::REGULAR: {
      FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
      output->print_cr(" @%d \"%s\" %s %d/%d %s",
                       b->offset(),
                       fi->name(_cp)->as_C_string(),
                       fi->signature(_cp)->as_C_string(),
                       b->size(),
                       b->alignment(),
                       "REGULAR");
      break;
    }
    case LayoutRawBlock::INLINED: {
      FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
      output->print_cr(" @%d \"%s\" %s %d/%d %s",
                       b->offset(),
                       fi->name(_cp)->as_C_string(),
                       fi->signature(_cp)->as_C_string(),
                       b->size(),
                       b->alignment(),
                       "INLINED");
      break;
    }
    case LayoutRawBlock::RESERVED: {
      output->print_cr(" @%d %d/- %s",
                       b->offset(),
                       b->size(),
                       "RESERVED");
      break;
    }
    case LayoutRawBlock::INHERITED: {
      assert(!is_static, "Static fields are not inherited in layouts");
      assert(super != NULL, "super klass must be provided to retrieve inherited fields info");
      bool found = false;
      const InstanceKlass* ik = super;
      // Walk up the hierarchy until the field at this offset is found
      while (!found && ik != NULL) {
        for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
          if (fs.offset() == b->offset()) {
            output->print_cr(" @%d \"%s\" %s %d/%d %s",
                b->offset(),
                fs.name()->as_C_string(),
                fs.signature()->as_C_string(),
                b->size(),
                b->size(), // so far, alignment constraint == size, will change with Valhalla
                "INHERITED");
            found = true;
            break;
          }
        }
        ik = ik->java_super();
      }
      break;
    }
    case LayoutRawBlock::EMPTY:
      output->print_cr(" @%d %d/1 %s",
                       b->offset(),
                       b->size(),
                       "EMPTY");
      break;
    case LayoutRawBlock::PADDING:
      output->print_cr(" @%d %d/1 %s",
                       b->offset(),
                       b->size(),
                       "PADDING");
      break;
    }
    b = b->next_block();
  }
}
 537 
// Gathers everything needed to compute both the instance and static layouts
// of the class being parsed; results are published through "info".
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       Array<u2>* fields, bool is_contended, bool is_inline_type, ClassLoaderData* class_loader_data,
                                       Handle protection_domain, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _fields(fields),
  _info(info),
  _root_group(NULL),                           // created in prologue()
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(NULL),                        // created in prologue()
  _layout(NULL),                               // created in prologue()
  _static_layout(NULL),                        // created in prologue()
  _class_loader_data(class_loader_data),
  _protection_domain(protection_domain),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _first_field_offset(-1),
  _exact_size_in_bytes(-1),
  _has_nonstatic_fields(false),
  _has_inline_type_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _has_flattening_information(is_inline_type),
  _has_nonatomic_values(false),
  _atomic_field_count(0)
 {}
 565 
 566 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
 567   assert(g > 0, "must only be called for named contended groups");
 568   FieldGroup* fg = NULL;
 569   for (int i = 0; i < _contended_groups.length(); i++) {
 570     fg = _contended_groups.at(i);
 571     if (fg->contended_group() == g) return fg;
 572   }
 573   fg = new FieldGroup(g);
 574   _contended_groups.append(fg);
 575   return fg;
 576 }
 577 
// Creates the instance and static layouts and the default field groups
// before field sorting/allocation begins.
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_fields, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass);
  if (super_klass != NULL) {
    // A class has non-static fields as soon as any of its supers does
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_fields, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
 590 
// Field sorting for regular (non-inline) classes:
//   - fields are sorted in static and non-static fields
//   - non-static fields are also sorted according to their contention group
//     (support of the @Contended annotation)
//   - @Contended annotation is ignored for static fields
//   - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
    FieldGroup* group = NULL;
    if (fs.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _atomic_field_count++;  // we might decrement this
      if (fs.is_contended()) {
        int g = fs.contended_group();
        if (g == 0) {
          // Anonymous contended group: the field gets a group of its own
          // NOTE(review): 'true' converts to contended group id 1 here, which
          // can collide with a named group 1 in get_or_create_contended_group();
          // presumably harmless because anonymous groups are only appended — confirm
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != NULL, "invariant");
    BasicType type = Signature::basic_type(fs.signature());
    switch(type) {
    case T_BYTE:
    case T_CHAR:
    case T_DOUBLE:
    case T_FLOAT:
    case T_INT:
    case T_LONG:
    case T_SHORT:
    case T_BOOLEAN:
      group->add_primitive_field(fs, type);
      break;
    case T_OBJECT:
    case T_ARRAY:
      if (group != _static_fields) _nonstatic_oopmap_count++;
      group->add_oop_field(fs);
      break;
    case T_INLINE_TYPE:
      _has_inline_type_fields = true;
      if (group == _static_fields) {
        // static fields are never inlined
        group->add_oop_field(fs);
      } else {
        _has_flattening_information = true;
        // Flattening decision to be taken here
        // This code assumes all verification already have been performed
        // (field's type has been loaded and it is an inline klass)
        JavaThread* THREAD = JavaThread::current();
        Klass* klass =
            SystemDictionary::resolve_inline_type_field_or_fail(&fs,
                                                                Handle(THREAD, _class_loader_data->class_loader()),
                                                                _protection_domain, true, THREAD);
        assert(klass != NULL, "Sanity check");
        InlineKlass* vk = InlineKlass::cast(klass);
        // InlineFieldMaxFlatSize < 0 disables the size limit entirely
        bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
                                   (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
        bool too_atomic_to_flatten = vk->is_declared_atomic() || AlwaysAtomicAccesses;
        bool too_volatile_to_flatten = fs.access_flags().is_volatile();
        if (vk->is_naturally_atomic()) {
          too_atomic_to_flatten = false;
          //too_volatile_to_flatten = false; //FIXME
          // volatile fields are currently never inlined, this could change in the future
        }
        // NOTE(review): final fields are flattened even when the too_* checks
        // would forbid it — confirm this override is intended
        if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten) || fs.access_flags().is_final()) {
          group->add_inlined_field(fs, vk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          fs.set_inlined(true);
          if (!vk->is_atomic()) {  // flat and non-atomic: take note
            _has_nonatomic_values = true;
            _atomic_field_count--;  // every other field is atomic but this one
          }
        } else {
          // Not flattened: the field is stored as a reference instead
          _nonstatic_oopmap_count++;
          group->add_oop_field(fs);
        }
      }
      break;
    default:
      fatal("Something wrong?");
    }
  }
  // Big-to-small ordering reduces padding during allocation
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
 687 
 688 /* Field sorting for inline classes:
 689  *   - because inline classes are immutable, the @Contended annotation is ignored
 690  *     when computing their layout (with only read operation, there's no false
 691  *     sharing issue)
 692  *   - this method also records the alignment of the field with the most
 693  *     constraining alignment, this value is then used as the alignment
 694  *     constraint when flattening this inline type into another container
 695  *   - field flattening decisions are taken in this method (those decisions are
 696  *     currently only based on the size of the fields to be inlined, the size
 697  *     of the resulting instance is not considered)
 698  */
 699 void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
 700   assert(_is_inline_type, "Should only be used for inline classes");
 701   int alignment = 1;
 702   for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
 703     FieldGroup* group = NULL;
 704     int field_alignment = 1;
 705     if (fs.access_flags().is_static()) {
 706       group = _static_fields;
 707     } else {
 708       _has_nonstatic_fields = true;
 709       _atomic_field_count++;  // we might decrement this
 710       group = _root_group;
 711     }
 712     assert(group != NULL, "invariant");
 713     BasicType type = Signature::basic_type(fs.signature());
 714     switch(type) {
 715     case T_BYTE:
 716     case T_CHAR:
 717     case T_DOUBLE:
 718     case T_FLOAT:
 719     case T_INT:
 720     case T_LONG:
 721     case T_SHORT:
 722     case T_BOOLEAN:
 723       if (group != _static_fields) {
 724         field_alignment = type2aelembytes(type); // alignment == size for primitive types
 725       }
 726       group->add_primitive_field(fs, type);
 727       break;
 728     case T_OBJECT:
 729     case T_ARRAY:
 730       if (group != _static_fields) {
 731         _nonstatic_oopmap_count++;
 732         field_alignment = type2aelembytes(type); // alignment == size for oops
 733       }
 734       group->add_oop_field(fs);
 735       break;
 736     case T_INLINE_TYPE: {
 737 //      fs.set_inline(true);
 738       _has_inline_type_fields = true;
 739       if (group == _static_fields) {
 740         // static fields are never inlined
 741         group->add_oop_field(fs);
 742       } else {
 743         // Flattening decision to be taken here
 744         // This code assumes all verifications have already been performed
 745         // (field's type has been loaded and it is an inline klass)
 746         JavaThread* THREAD = JavaThread::current();
 747         Klass* klass =
 748             SystemDictionary::resolve_inline_type_field_or_fail(&fs,
 749                 Handle(THREAD, _class_loader_data->class_loader()),
 750                 _protection_domain, true, CHECK);
 751         assert(klass != NULL, "Sanity check");
 752         InlineKlass* vk = InlineKlass::cast(klass);
 753         bool too_big_to_flatten = (InlineFieldMaxFlatSize >= 0 &&
 754                                    (vk->size_helper() * HeapWordSize) > InlineFieldMaxFlatSize);
 755         bool too_atomic_to_flatten = vk->is_declared_atomic() || AlwaysAtomicAccesses;
 756         bool too_volatile_to_flatten = fs.access_flags().is_volatile();
 757         if (vk->is_naturally_atomic()) {
 758           too_atomic_to_flatten = false;
 759           //too_volatile_to_flatten = false; //FIXME
 760           // volatile fields are currently never inlined, this could change in the future
 761         }
 762         if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten) || fs.access_flags().is_final()) {
 763           group->add_inlined_field(fs, vk);
 764           _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
 765           field_alignment = vk->get_alignment();
 766           fs.set_inlined(true);
 767           if (!vk->is_atomic()) {  // flat and non-atomic: take note
 768             _has_nonatomic_values = true;
 769             _atomic_field_count--;  // every other field is atomic but this one
 770           }
 771         } else {
 772           _nonstatic_oopmap_count++;
 773           field_alignment = type2aelembytes(T_OBJECT);
 774           group->add_oop_field(fs);
 775         }
 776       }
 777       break;
 778     }
 779     default:
 780       fatal("Unexpected BasicType");
 781     }
 782     if (!fs.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
 783   }
 784   _alignment = alignment;
 785   if (!_has_nonstatic_fields) {
 786     // There are a number of fixes required throughout the type system and JIT
 787     Exceptions::fthrow(THREAD_AND_LOCATION,
 788                        vmSymbols::java_lang_ClassFormatError(),
 789                        "Value Types do not support zero instance size yet");
 790     return;
 791   }
 792 }
 793 
 794 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
 795   if (ContendedPaddingWidth > 0) {
 796     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
 797     _layout->insert(slot, padding);
 798   }
 799 }
 800 
 801 /* Computation of regular classes layout is an evolution of the previous default layout
 802  * (FieldAllocationStyle 1):
 803  *   - primitive fields (both primitive types and flattened inline types) are allocated
 804  *     first, from the biggest to the smallest
 805  *   - then oop fields are allocated (to increase chances to have contiguous oops and
 806  *     a simpler oopmap).
 807  */
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    // The whole class is @Contended: pad before the first field of this class.
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }
  // Non-contended instance fields: primitives first (big then small), oops last
  // to increase the chance of a contiguous oop area and a simpler oopmap.
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->small_primitive_fields());
  _layout->add(_root_group->oop_fields());

  if (!_contended_groups.is_empty()) {
    // Each @Contended group is isolated by padding inserted before it;
    // tail padding after the last group is added below.
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      // NOTE(review): big_primitive_fields() is added without the `start`
      // anchor, unlike the two calls below — confirm this is intentional;
      // without the anchor a field could land before the contended padding.
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }
  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
 845 
 846 /* Computation of inline classes has a slightly different strategy than for
 847  * regular classes. Regular classes have their oop fields allocated at the end
 848  * of the layout to increase GC performances. Unfortunately, this strategy
 849  * increases the number of empty slots inside an instance. Because the purpose
 850  * of inline classes is to be embedded into other containers, it is critical
 851  * to keep their size as small as possible. For this reason, the allocation
 852  * strategy is:
 *   - big primitive fields (primitive types and flattened inline types bigger
 *     than an oop) are allocated first (from the biggest to the smallest)
 855  *   - then oop fields
 856  *   - then small primitive fields (from the biggest to the smallest)
 857  */
 858 void FieldLayoutBuilder::compute_inline_class_layout(TRAPS) {
 859   prologue();
 860   inline_class_field_sorting(CHECK);
 861   // Inline types are not polymorphic, so they cannot inherit fields.
 862   // By consequence, at this stage, the layout must be composed of a RESERVED
 863   // block, followed by an EMPTY block.
 864   assert(_layout->start()->kind() == LayoutRawBlock::RESERVED, "Unexpected");
 865   assert(_layout->start()->next_block()->kind() == LayoutRawBlock::EMPTY, "Unexpected");
 866   LayoutRawBlock* first_empty = _layout->start()->next_block();
 867   if (first_empty->offset() % _alignment != 0) {
 868     LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _alignment - (first_empty->offset() % _alignment));
 869     _layout->insert(first_empty, padding);
 870     _layout->set_start(padding->next_block());
 871   }
 872 
 873   _layout->add(_root_group->big_primitive_fields());
 874   _layout->add(_root_group->oop_fields());
 875   _layout->add(_root_group->small_primitive_fields());
 876 
 877   LayoutRawBlock* first_field = _layout->first_field_block();
 878    if (first_field != NULL) {
 879      _first_field_offset = _layout->first_field_block()->offset();
 880      _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
 881    } else {
 882      // special case for empty value types
 883      _first_field_offset = _layout->blocks()->size();
 884      _exact_size_in_bytes = 0;
 885    }
 886   _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
 887 
 888   // Warning:: InstanceMirrorKlass expects static oops to be allocated first
 889   _static_layout->add_contiguously(_static_fields->oop_fields());
 890   _static_layout->add(_static_fields->big_primitive_fields());
 891   _static_layout->add(_static_fields->small_primitive_fields());
 892 
 893   epilogue();
 894 }
 895 
 896 void FieldLayoutBuilder::add_inlined_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
 897                 InlineKlass* vklass, int offset) {
 898   int diff = offset - vklass->first_field_offset();
 899   const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
 900   const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
 901   while (map < last_map) {
 902     nonstatic_oop_maps->add(map->offset() + diff, map->count());
 903     map++;
 904   }
 905 }
 906 
 907 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
 908   if (list != NULL) {
 909     for (int i = 0; i < list->length(); i++) {
 910       LayoutRawBlock* f = list->at(i);
 911       if (f->kind() == LayoutRawBlock::INLINED) {
 912         InlineKlass* vk = f->inline_klass();
 913         assert(vk != NULL, "Should have been initialized");
 914         if (vk->contains_oops()) {
 915           add_inlined_field_oopmap(nonstatic_oop_maps, vk, f->offset());
 916         }
 917       }
 918     }
 919   }
 920 }
 921 
 922 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
 923   if (group->oop_fields() != NULL) {
 924     for (int i = 0; i < group->oop_fields()->length(); i++) {
 925       LayoutRawBlock* b = group->oop_fields()->at(i);
 926       nonstatic_oop_maps->add(b->offset(), 1);
 927     }
 928   }
 929   register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
 930   register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
 931 }
 932 
// Finalizes the layout computation: builds the nonstatic oopmaps, computes
// instance/static sizes, and passes everything back through _info for
// InstanceKlass creation. Optionally prints the layout.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  // Upper bound on the number of maps: inherited maps plus one entry per
  // oop/flattened field counted during field sorting; compact() merges
  // adjacent entries afterwards.
  int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != NULL && cg->oop_fields()->at(0) != NULL, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  // Static field size is measured in words from the start of the static
  // field area inside the class mirror.
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inline_fields = _has_inline_type_fields;

  // An inline type is naturally atomic if it has just one field, and
  // that field is simple enough.
  _info->_is_naturally_atomic = (_is_inline_type &&
                                 (_atomic_field_count <= 1) &&
                                 !_has_nonatomic_values &&
                                 _contended_groups.is_empty());
  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.


  if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, NULL);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      tty->print_cr("First field offset = %d", _first_field_offset);
      tty->print_cr("Alignment = %d bytes", _alignment);
      tty->print_cr("Exact size = %d bytes", _exact_size_in_bytes);
    }
    tty->print_cr("---");
  }
}
 999 
1000 void FieldLayoutBuilder::build_layout(TRAPS) {
1001   if (_is_inline_type) {
1002     compute_inline_class_layout(CHECK);
1003   } else {
1004     compute_regular_layout();
1005   }
1006 }