src/hotspot/share/c1/c1_GraphBuilder.cpp

@@ -28,10 +28,12 @@
  #include "c1/c1_Compilation.hpp"
  #include "c1/c1_GraphBuilder.hpp"
  #include "c1/c1_InstructionPrinter.hpp"
  #include "ci/ciCallSite.hpp"
  #include "ci/ciField.hpp"
+ #include "ci/ciFlatArrayKlass.hpp"
+ #include "ci/ciInlineKlass.hpp"
  #include "ci/ciKlass.hpp"
  #include "ci/ciMemberName.hpp"
  #include "ci/ciSymbols.hpp"
  #include "ci/ciUtilities.inline.hpp"
  #include "classfile/javaClasses.hpp"

@@ -1061,11 +1063,19 @@
  }
  
  
  void GraphBuilder::load_indexed(BasicType type) {
  // In case of in-block code motion in range check elimination
-   ValueStack* state_before = copy_state_indexed_access();
+   ValueStack* state_before = nullptr;
+   int array_idx = state()->stack_size() - 2;
+   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
+     // Save the entire state and re-execute on deopt when accessing flat arrays
+     state_before = copy_state_before();
+     state_before->set_should_reexecute(true);
+   } else {
+     state_before = copy_state_indexed_access();
+   }
    compilation()->set_has_access_indexed(true);
    Value index = ipop();
    Value array = apop();
    Value length = nullptr;
    if (CSEArrayLength ||

@@ -1073,17 +1083,85 @@
        (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
        (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
        (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
      length = append(new ArrayLength(array, state_before));
    }
-   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
+ 
+   bool need_membar = false;
+   LoadIndexed* load_indexed = nullptr;
+   Instruction* result = nullptr;
+   if (array->is_loaded_flat_array()) {
+     ciType* array_type = array->declared_type();
+     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
+ 
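+     // Peek at the following bytecode: if it is a getfield on the element we
+     // are about to load, the element itself may never be needed and the two
+     // accesses can be fused (see the pending load handling in access_field()).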
+     bool can_delay_access = false;
+     ciBytecodeStream s(method());
+     s.force_bci(bci());
+     s.next();
+     if (s.cur_bc() == Bytecodes::_getfield) {
+       bool will_link;
+       ciField* next_field = s.get_field(will_link);
+       bool next_needs_patching = !next_field->holder()->is_initialized() ||
+                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
+                                  PatchALot;
+       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
+     }
+     if (can_delay_access) {
+       // Potentially optimizable array access; store the information for a delayed decision
+       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
+       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
+       li->set_delayed(dli);
+       set_pending_load_indexed(dli);
+       return; // Nothing else to do for now
+     } else {
+       if (elem_klass->is_empty()) {
+         // No need to create a new instance; the default instance will be used instead
+         load_indexed = new LoadIndexed(array, index, length, type, state_before);
+         apush(append(load_indexed));
+       } else {
+         NewInstance* new_instance = new NewInstance(elem_klass, state_before, false, true);
+         _memory->new_instance(new_instance);
+         apush(append_split(new_instance));
+         load_indexed = new LoadIndexed(array, index, length, type, state_before);
+         load_indexed->set_vt(new_instance);
+         // The LoadIndexed node will initialize this instance by copying from
+         // the flat field.  Ensure these stores are visible before any
+         // subsequent store that publishes this reference.
+         need_membar = true;
+       }
+     }
+   } else {
+     load_indexed = new LoadIndexed(array, index, length, type, state_before);
+     if (profile_array_accesses() && is_reference_type(type)) {
+       compilation()->set_would_profile(true);
+       load_indexed->set_should_profile(true);
+       load_indexed->set_profiled_method(method());
+       load_indexed->set_profiled_bci(bci());
+     }
+   }
+   result = append(load_indexed);
+   if (need_membar) {
+     append(new MemBar(lir_membar_storestore));
+   }
+   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
+   if (!array->is_loaded_flat_array()) {
+     push(as_ValueType(type), result);
+   }
  }
  
  
  void GraphBuilder::store_indexed(BasicType type) {
  // In case of in-block code motion in range check elimination
-   ValueStack* state_before = copy_state_indexed_access();
+   ValueStack* state_before = nullptr;
+   int array_idx = state()->stack_size() - 3;
+   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
+     // Save the entire state and re-execute on deopt when accessing flat arrays
+     state_before = copy_state_before();
+     state_before->set_should_reexecute(true);
+   } else {
+     state_before = copy_state_indexed_access();
+   }
    compilation()->set_has_access_indexed(true);
    Value value = pop(as_ValueType(type));
    Value index = ipop();
    Value array = apop();
    Value length = nullptr;

@@ -1104,36 +1182,32 @@
        value = append(new LogicOp(Bytecodes::_iand, value, mask));
      }
    } else if (type == T_BYTE) {
      check_boolean = true;
    }
-   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
-   append(result);
-   _memory->store_value(value);
  
-   if (type == T_OBJECT && is_profiling()) {
-     // Note that we'd collect profile data in this method if we wanted it.
+   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
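+   // Stores into an array that is known to be flat are not profiled: the
+   // exact element type is already known to the compiler.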
+   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
      compilation()->set_would_profile(true);
- 
-     if (profile_checkcasts()) {
-       result->set_profiled_method(method());
-       result->set_profiled_bci(bci());
-       result->set_should_profile(true);
-     }
+     store_indexed->set_should_profile(true);
+     store_indexed->set_profiled_method(method());
+     store_indexed->set_profiled_bci(bci());
    }
+   Instruction* result = append(store_indexed);
+   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
+   _memory->store_value(value);
  }
  
- 
  void GraphBuilder::stack_op(Bytecodes::Code code) {
    switch (code) {
      case Bytecodes::_pop:
-       { state()->raw_pop();
+       { Value w = state()->raw_pop();
        }
        break;
      case Bytecodes::_pop2:
-       { state()->raw_pop();
-         state()->raw_pop();
+       { Value w1 = state()->raw_pop();
+         Value w2 = state()->raw_pop();
        }
        break;
      case Bytecodes::_dup:
        { Value w = state()->raw_pop();
          state()->raw_push(w);

@@ -1308,13 +1382,40 @@
  
  void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
    BlockBegin* tsux = block_at(stream()->get_dest());
    BlockBegin* fsux = block_at(stream()->next_bci());
    bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
+ 
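+   // With Valhalla, if_acmpeq/if_acmpne on value objects must compare by
+   // substitutability (field values) rather than by reference identity, so
+   // the If node may need a slow-path check.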
+   bool subst_check = false;
+   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
+     ValueType* left_vt = x->type();
+     ValueType* right_vt = y->type();
+     if (left_vt->is_object()) {
+       assert(right_vt->is_object(), "must be");
+       ciKlass* left_klass = x->as_loaded_klass_or_null();
+       ciKlass* right_klass = y->as_loaded_klass_or_null();
+ 
+       if (left_klass == nullptr || right_klass == nullptr) {
+         // The klass is still unloaded, or came from a Phi node. Take the slow path.
+         subst_check = true;
+       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
+         // Either operand may be a value object, but we're not sure. Take the slow path.
+         subst_check = true;
+       } else {
+         // No need for a substitutability check
+       }
+     }
+   }
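+   // Profile the operand types seen at this acmp so that later compilations
+   // can specialize the comparison.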
+   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
+       is_profiling() && profile_branches()) {
+     compilation()->set_would_profile(true);
+     append(new ProfileACmpTypes(method(), bci(), x, y));
+   }
+ 
    // In case of loop invariant code motion or predicate insertion
    // before the body of a loop the state is needed
-   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
+   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
  
    assert(i->as_Goto() == nullptr ||
           (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
           (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
           "safepoint state of Goto returned by canonicalizer incorrect");

@@ -1561,11 +1662,11 @@
      call_register_finalizer();
    }
  
    // The conditions for a memory barrier are described in Parse::do_exits().
    bool need_mem_bar = false;
-   if (method()->name() == ciSymbols::object_initializer_name() &&
+   if (method()->is_object_constructor() &&
         (scope()->wrote_final() ||
           (AlwaysSafeConstructors && scope()->wrote_fields()) ||
           (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
      need_mem_bar = true;
    }

@@ -1712,20 +1813,34 @@
      default:
        return new Constant(value);
    }
  }
  
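+ // Copy the contents of an inline type field by field from a flat source
+ // location to a destination. Offsets are rebased by first_field_offset()
+ // because a flat layout carries no object header.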
+ void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
+   for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
+     ciField* inner_field = vk->nonstatic_field_at(i);
+     assert(!inner_field->is_flat(), "the iteration over nested fields is handled by the loop itself");
+     int off = inner_field->offset_in_bytes() - vk->first_field_offset();
+     LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
+     Value replacement = append(load);
+     StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
+     store->set_enclosing_field(enclosing_field);
+     append(store);
+   }
+ }
+ 
  void GraphBuilder::access_field(Bytecodes::Code code) {
    bool will_link;
    ciField* field = stream()->get_field(will_link);
    ciInstanceKlass* holder = field->holder();
    BasicType field_type = field->type()->basic_type();
    ValueType* type = as_ValueType(field_type);
+ 
    // call will_link again to determine if the field is valid.
    const bool needs_patching = !holder->is_loaded() ||
                                !field->will_link(method(), code) ||
-                               PatchALot;
+                               (!field->is_flat() && PatchALot);
  
    ValueStack* state_before = nullptr;
    if (!holder->is_initialized() || needs_patching) {
      // save state before instruction for debug info when
      // deoptimization happens during patching

@@ -1740,72 +1855,101 @@
      } else {
        obj = new Constant(new InstanceConstant(holder->java_mirror()));
      }
    }
  
-   if (field->is_final() && (code == Bytecodes::_putfield)) {
+   if (field->is_final() && code == Bytecodes::_putfield) {
      scope()->set_wrote_final();
    }
  
    if (code == Bytecodes::_putfield) {
      scope()->set_wrote_fields();
      if (field->is_volatile()) {
        scope()->set_wrote_volatile();
      }
    }
  
-   const int offset = !needs_patching ? field->offset_in_bytes() : -1;
+   int offset = !needs_patching ? field->offset_in_bytes() : -1;
    switch (code) {
      case Bytecodes::_getstatic: {
        // check for compile-time constants, i.e., initialized static final fields
        Value constant = nullptr;
        if (field->is_static_constant() && !PatchALot) {
          ciConstant field_value = field->constant_value();
          assert(!field->is_stable() || !field_value.is_null_or_zero(),
                 "stable static w/ default value shouldn't be a constant");
          constant = make_constant(field_value, field);
+       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
+                  field->type()->as_inline_klass()->is_empty()) {
+         // Loading from a field of an empty inline type. Just return the default instance.
+         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
        }
        if (constant != nullptr) {
          push(type, append(constant));
        } else {
          if (state_before == nullptr) {
            state_before = copy_state_for_exception();
          }
-         push(type, append(new LoadField(append(obj), offset, field, true,
-                                         state_before, needs_patching)));
+         LoadField* load_field = new LoadField(append(obj), offset, field, true,
+                                         state_before, needs_patching);
+         push(type, append(load_field));
        }
        break;
      }
      case Bytecodes::_putstatic: {
        Value val = pop(type);
        if (state_before == nullptr) {
          state_before = copy_state_for_exception();
        }
-       if (field->type()->basic_type() == T_BOOLEAN) {
+       if (field_type == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
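+       // Null-free fields reject null stores. The check is needed even when
+       // the store itself is elided for an empty inline type below.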
+       if (field->is_null_free()) {
+         null_check(val);
+       }
+       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
+         // Storing to a field of an empty inline type. Ignore.
+         break;
+       }
        append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
        break;
      }
      case Bytecodes::_getfield: {
        // Check for compile-time constants, i.e., trusted final non-static fields.
        Value constant = nullptr;
-       obj = apop();
-       ObjectType* obj_type = obj->type()->as_ObjectType();
-       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
-         ciObject* const_oop = obj_type->constant_value();
-         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
-           ciConstant field_value = field->constant_value_of(const_oop);
-           if (field_value.is_valid()) {
-             constant = make_constant(field_value, field);
-             // For CallSite objects add a dependency for invalidation of the optimization.
-             if (field->is_call_site_target()) {
-               ciCallSite* call_site = const_oop->as_call_site();
-               if (!call_site->is_fully_initialized_constant_call_site()) {
-                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
-                 dependency_recorder()->assert_call_site_target_value(call_site, target);
+       if (state_before == nullptr && field->is_flat()) {
+         // Save the entire state and re-execute on deopt when accessing flat fields
+         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
+         state_before = copy_state_before();
+       }
+       if (!has_pending_field_access() && !has_pending_load_indexed()) {
+         obj = apop();
+         ObjectType* obj_type = obj->type()->as_ObjectType();
+         if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized()
+             && field->type()->as_inline_klass()->is_empty()) {
+           // Loading from a field of an empty inline type. Just return the default instance.
+           null_check(obj);
+           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
+         } else if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
+           ciObject* const_oop = obj_type->constant_value();
+           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
+             ciConstant field_value = field->constant_value_of(const_oop);
+             if (field_value.is_valid()) {
+               if (field->is_null_free() && field_value.is_null_or_zero()) {
+                 // Non-flat inline type field. Replace null with the default value.
+                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
+               } else {
+                 constant = make_constant(field_value, field);
+               }
+               // For CallSite objects add a dependency for invalidation of the optimization.
+               if (field->is_call_site_target()) {
+                 ciCallSite* call_site = const_oop->as_call_site();
+                 if (!call_site->is_fully_initialized_constant_call_site()) {
+                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
+                   dependency_recorder()->assert_call_site_target_value(call_site, target);
+                 }
                }
              }
            }
          }
        }

@@ -1813,62 +1957,158 @@
          push(type, append(constant));
        } else {
          if (state_before == nullptr) {
            state_before = copy_state_for_exception();
          }
-         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
-         Value replacement = !needs_patching ? _memory->load(load) : load;
-         if (replacement != load) {
-           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
-           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
-           // conversion. Emit an explicit conversion here to get the correct field value after the write.
-           BasicType bt = field->type()->basic_type();
-           switch (bt) {
-           case T_BOOLEAN:
-           case T_BYTE:
-             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
-             break;
-           case T_CHAR:
-             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
-             break;
-           case T_SHORT:
-             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
-             break;
-           default:
+         if (!field->is_flat()) {
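+           // A pending delayed access means the preceding flat field getfields
+           // were never materialized: fold their accumulated offset into this
+           // load and read directly from the original holder object.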
+           if (has_pending_field_access()) {
+             assert(!needs_patching, "Can't patch delayed field access");
+             obj = pending_field_access()->obj();
+             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
+             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
+             assert(field != nullptr, "field not found");
+             set_pending_field_access(nullptr);
+           } else if (has_pending_load_indexed()) {
+             assert(!needs_patching, "Can't patch delayed field access");
+             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
+             LoadIndexed* li = pending_load_indexed()->load_instr();
+             li->set_type(type);
+             push(type, append(li));
+             set_pending_load_indexed(nullptr);
              break;
            }
-           push(type, replacement);
-         } else {
-           push(type, append(load));
+           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
+           Value replacement = !needs_patching ? _memory->load(load) : load;
+           if (replacement != load) {
+             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
+             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
+             // conversion. Emit an explicit conversion here to get the correct field value after the write.
+             switch (field_type) {
+             case T_BOOLEAN:
+             case T_BYTE:
+               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
+               break;
+             case T_CHAR:
+               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
+               break;
+             case T_SHORT:
+               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
+               break;
+             default:
+               break;
+             }
+             push(type, replacement);
+           } else {
+             push(type, append(load));
+           }
+         } else {  // field is flat
+           // Look at the next bytecode to check if we can delay the field access
+           bool can_delay_access = false;
+           ciBytecodeStream s(method());
+           s.force_bci(bci());
+           s.next();
+           if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
+             ciField* next_field = s.get_field(will_link);
+             bool next_needs_patching = !next_field->holder()->is_loaded() ||
+                                        !next_field->will_link(method(), Bytecodes::_getfield) ||
+                                        PatchALot;
+             can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
+           }
+           if (can_delay_access) {
+             if (has_pending_load_indexed()) {
+               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
+             } else if (has_pending_field_access()) {
+               pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
+             } else {
+               null_check(obj);
+               DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes());
+               set_pending_field_access(dfa);
+             }
+           } else {
+             ciInlineKlass* inline_klass = field->type()->as_inline_klass();
+             scope()->set_wrote_final();
+             scope()->set_wrote_fields();
+             bool need_membar = false;
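+             // Either use the default instance (empty klass), attach a buffer
+             // to a pending delayed array load, or allocate a buffer and copy
+             // the flat field contents into it.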
+             if (inline_klass->is_initialized() && inline_klass->is_empty()) {
+               apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
+               if (has_pending_field_access()) {
+                 set_pending_field_access(nullptr);
+               } else if (has_pending_load_indexed()) {
+                 set_pending_load_indexed(nullptr);
+               }
+             } else if (has_pending_load_indexed()) {
+               assert(!needs_patching, "Can't patch delayed field access");
+               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
+               NewInstance* vt = new NewInstance(inline_klass, pending_load_indexed()->state_before(), false, true);
+               _memory->new_instance(vt);
+               pending_load_indexed()->load_instr()->set_vt(vt);
+               apush(append_split(vt));
+               append(pending_load_indexed()->load_instr());
+               set_pending_load_indexed(nullptr);
+               need_membar = true;
+             } else {
+               NewInstance* new_instance = new NewInstance(inline_klass, state_before, false, true);
+               _memory->new_instance(new_instance);
+               apush(append_split(new_instance));
+               assert(!needs_patching, "Can't patch flat inline type field access");
+               if (has_pending_field_access()) {
+                 copy_inline_content(inline_klass, pending_field_access()->obj(),
+                                     pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->first_field_offset(),
+                                     new_instance, inline_klass->first_field_offset(), state_before);
+                 set_pending_field_access(nullptr);
+               } else {
+                 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->first_field_offset(), state_before);
+               }
+               need_membar = true;
+             }
+             if (need_membar) {
+               // If we allocated a new instance, ensure the stores that copy
+               // the field contents are visible before any subsequent store
+               // that publishes this reference.
+               append(new MemBar(lir_membar_storestore));
+             }
+           }
          }
        }
        break;
      }
      case Bytecodes::_putfield: {
        Value val = pop(type);
        obj = apop();
        if (state_before == nullptr) {
          state_before = copy_state_for_exception();
        }
-       if (field->type()->basic_type() == T_BOOLEAN) {
+       if (field_type == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
-       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
-       if (!needs_patching) store = _memory->store(store);
-       if (store != nullptr) {
-         append(store);
+       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
+         // Storing to a field of an empty inline type. Ignore.
+         null_check(obj);
+         null_check(val);
+       } else if (!field->is_flat()) {
+         if (field->is_null_free()) {
+           null_check(val);
+         }
+         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
+         if (!needs_patching) store = _memory->store(store);
+         if (store != nullptr) {
+           append(store);
+         }
+       } else {
+         assert(!needs_patching, "Can't patch flat inline type field access");
+         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
+         copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
        }
        break;
      }
      default:
        ShouldNotReachHere();
        break;
    }
  }
  
- 
  Dependencies* GraphBuilder::dependency_recorder() const {
    assert(DeoptC1, "need debug information");
    return compilation()->dependency_recorder();
  }
  

@@ -1981,11 +2221,11 @@
      // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
      ciKlass* receiver_constraint = nullptr;
  
      if (bc_raw == Bytecodes::_invokeinterface) {
        receiver_constraint = holder;
-     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
+     } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
        receiver_constraint = calling_klass;
      }
  
      if (receiver_constraint != nullptr) {
        int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);

@@ -2234,16 +2474,21 @@
  
  void GraphBuilder::new_instance(int klass_index) {
    ValueStack* state_before = copy_state_exhandling();
    ciKlass* klass = stream()->get_klass();
    assert(klass->is_instance_klass(), "must be an instance klass");
-   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
-   _memory->new_instance(new_instance);
-   apush(append_split(new_instance));
+   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
+       klass->as_inline_klass()->is_initialized() && klass->as_inline_klass()->is_empty()) {
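+     // An initialized, empty inline klass has no state to allocate: push the
+     // shared default instance instead of emitting a NewInstance.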
+     ciInlineKlass* vk = klass->as_inline_klass();
+     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
+   } else {
+     NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass(), false);
+     _memory->new_instance(new_instance);
+     apush(append_split(new_instance));
+   }
  }
  
- 
  void GraphBuilder::new_type_array() {
    ValueStack* state_before = copy_state_exhandling();
    apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
  }
  

@@ -2312,14 +2557,33 @@
    }
  }
  
  
  void GraphBuilder::monitorenter(Value x, int bci) {
+   bool maybe_inlinetype = false;
+   if (bci == InvocationEntryBci) {
+     // Called by GraphBuilder::inline_sync_entry.
+ #ifdef ASSERT
+     ciType* obj_type = x->declared_type();
+     assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
+ #endif
+   } else {
+     // We are compiling a monitorenter bytecode
+     if (EnableValhalla) {
+       ciType* obj_type = x->declared_type();
+       if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
+         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
+         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
+         maybe_inlinetype = true;
+       }
+     }
+   }
+ 
    // save state before locking in case of deoptimization after a NullPointerException
    ValueStack* state_before = copy_state_for_exception_with_bci(bci);
    compilation()->set_has_monitors(true);
-   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
+   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
    kill_all();
  }
  
  
  void GraphBuilder::monitorexit(Value x, int bci) {

@@ -2463,10 +2727,11 @@
          if (!oc || !oc->value()->is_null_object()) {
            return;
          }
        }
      }
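+     // A null-free value can never be null, so no check is needed.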
+     if (value->is_null_free()) return;
    }
    append(new NullCheck(value, copy_state_for_exception()));
  }
  
  

@@ -2488,11 +2753,13 @@
  
    assert(cur_state != nullptr, "state_before must be set");
    do {
      int cur_bci = cur_state->bci();
      assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
-     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
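+     // A pending delayed field access/load keeps state_before at the bci of
+     // the original access, which may differ from the current stream bci.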
+     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
+            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
+ 
  
      // join with all potential exception handlers
      XHandlers* list = cur_scope_data->xhandlers();
      const int n = list->length();
      for (int i = 0; i < n; i++) {

@@ -3270,11 +3537,12 @@
  
    // Set up locals for receiver
    int idx = 0;
    if (!method()->is_static()) {
      // we should always see the receiver
-     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
+     state->store_local(idx, new Local(method()->holder(), objectType, idx,
+              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
      idx = 1;
    }
  
    // Set up locals for incoming arguments
    ciSignature* sig = method()->signature();

@@ -3282,11 +3550,11 @@
      ciType* type = sig->type_at(i);
      BasicType basic_type = type->basic_type();
      // don't allow T_ARRAY to propagate into locals types
      if (is_reference_type(basic_type)) basic_type = T_OBJECT;
      ValueType* vt = as_ValueType(basic_type);
-     state->store_local(idx, new Local(type, vt, idx, false));
+     state->store_local(idx, new Local(type, vt, idx, false, false));
      idx += type->size();
    }
  
    // lock synchronized method
    if (method()->is_synchronized()) {

@@ -3302,10 +3570,12 @@
    , _compilation(compilation)
    , _memory(new MemoryBuffer())
    , _inline_bailout_msg(nullptr)
    , _instruction_count(0)
    , _osr_entry(nullptr)
+   , _pending_field_access(nullptr)
+   , _pending_load_indexed(nullptr)
  {
    int osr_bci = compilation->osr_bci();
  
    // determine entry points and bci2block mapping
    BlockListBuilder blm(compilation, scope, osr_bci);