< prev index next >

src/hotspot/share/c1/c1_GraphBuilder.cpp

Print this page
*** 28,10 ***
--- 28,12 ---
  #include "c1/c1_Compilation.hpp"
  #include "c1/c1_GraphBuilder.hpp"
  #include "c1/c1_InstructionPrinter.hpp"
  #include "ci/ciCallSite.hpp"
  #include "ci/ciField.hpp"
+ #include "ci/ciFlatArrayKlass.hpp"
+ #include "ci/ciInlineKlass.hpp"
  #include "ci/ciKlass.hpp"
  #include "ci/ciMemberName.hpp"
  #include "ci/ciSymbols.hpp"
  #include "ci/ciUtilities.inline.hpp"
  #include "compiler/compilationPolicy.hpp"

*** 655,10 ***
--- 657,21 ---
      } else {
        _fields.at(index)->kill();
      }
    }
  
+   // Record this newly allocated object
+   void new_instance(NewInlineTypeInstance* object) {
+     int index = _newobjects.length();
+     _newobjects.append(object);
+     if (_fields.at_grow(index, NULL) == NULL) {
+       _fields.at_put(index, new FieldBuffer());
+     } else {
+       _fields.at(index)->kill();
+     }
+   }
+ 
    void store_value(Value value) {
      int index = _newobjects.find(value);
      if (index != -1) {
        // stored a newly allocated object into another object.
        // Assume we've lost track of it as separate slice of memory.

*** 937,16 ***
--- 950,26 ---
  
  void GraphBuilder::load_local(ValueType* type, int index) {
    Value x = state()->local_at(index);
    assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
    push(type, x);
+   if (x->as_NewInlineTypeInstance() != NULL && x->as_NewInlineTypeInstance()->in_larval_state()) {
+     if (x->as_NewInlineTypeInstance()->on_stack_count() == 1) {
+       x->as_NewInlineTypeInstance()->set_not_larva_anymore();
+     } else {
+       x->as_NewInlineTypeInstance()->increment_on_stack_count();
+     }
+   }
  }
  
  
  void GraphBuilder::store_local(ValueType* type, int index) {
    Value x = pop(type);
    store_local(state(), x, index);
+   if (x->as_NewInlineTypeInstance() != NULL) {
+     x->as_NewInlineTypeInstance()->set_local_index(index);
+   }
  }
  
  
  void GraphBuilder::store_local(ValueStack* state, Value x, int index) {
    if (parsing_jsr()) {

*** 971,16 ***
        scope_data()->set_jsr_return_address_local(-1);
      }
    }
  
    state->store_local(index, round_fp(x));
  }
  
  
  void GraphBuilder::load_indexed(BasicType type) {
    // In case of in block code motion in range check elimination
!   ValueStack* state_before = copy_state_indexed_access();
    compilation()->set_has_access_indexed(true);
    Value index = ipop();
    Value array = apop();
    Value length = NULL;
    if (CSEArrayLength ||
--- 994,27 ---
        scope_data()->set_jsr_return_address_local(-1);
      }
    }
  
    state->store_local(index, round_fp(x));
+   if (x->as_NewInlineTypeInstance() != NULL) {
+     x->as_NewInlineTypeInstance()->set_local_index(index);
+   }
  }
  
  
  void GraphBuilder::load_indexed(BasicType type) {
    // In case of in block code motion in range check elimination
!   ValueStack* state_before = NULL;
+   int array_idx = state()->stack_size() - 2;
+   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
+     // Save the entire state and re-execute on deopt when accessing flattened arrays
+     state_before = copy_state_before();
+     state_before->set_should_reexecute(true);
+   } else {
+     state_before = copy_state_indexed_access();
+   }
    compilation()->set_has_access_indexed(true);
    Value index = ipop();
    Value array = apop();
    Value length = NULL;
    if (CSEArrayLength ||

*** 988,17 ***
        (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
        (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
        (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
      length = append(new ArrayLength(array, state_before));
    }
!   push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
  }
  
  
  void GraphBuilder::store_indexed(BasicType type) {
    // In case of in block code motion in range check elimination
!   ValueStack* state_before = copy_state_indexed_access();
    compilation()->set_has_access_indexed(true);
    Value value = pop(as_ValueType(type));
    Value index = ipop();
    Value array = apop();
    Value length = NULL;
--- 1022,85 ---
        (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
        (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
        (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
      length = append(new ArrayLength(array, state_before));
    }
! 
+   bool need_membar = false;
+   LoadIndexed* load_indexed = NULL;
+   Instruction* result = NULL;
+   if (array->is_loaded_flattened_array()) {
+     ciType* array_type = array->declared_type();
+     ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
+ 
+     bool can_delay_access = false;
+     ciBytecodeStream s(method());
+     s.force_bci(bci());
+     s.next();
+     if (s.cur_bc() == Bytecodes::_getfield) {
+       bool will_link;
+       ciField* next_field = s.get_field(will_link);
+       bool next_needs_patching = !next_field->holder()->is_loaded() ||
+                                  !next_field->will_link(method(), Bytecodes::_getfield) ||
+                                  PatchALot;
+       can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
+     }
+     if (can_delay_access) {
+       // potentially optimizable array access, storing information for delayed decision
+       LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
+       DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
+       li->set_delayed(dli);
+       set_pending_load_indexed(dli);
+       return; // Nothing else to do for now
+     } else {
+       if (elem_klass->is_empty()) {
+         // No need to create a new instance, the default instance will be used instead
+         load_indexed = new LoadIndexed(array, index, length, type, state_before);
+         apush(append(load_indexed));
+       } else {
+         NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(elem_klass, state_before);
+         _memory->new_instance(new_instance);
+         apush(append_split(new_instance));
+         load_indexed = new LoadIndexed(array, index, length, type, state_before);
+         load_indexed->set_vt(new_instance);
+         // The LoadIndexed node will initialise this instance by copying from
+         // the flattened field.  Ensure these stores are visible before any
+         // subsequent store that publishes this reference.
+         need_membar = true;
+       }
+     }
+   } else {
+     load_indexed = new LoadIndexed(array, index, length, type, state_before);
+     if (profile_array_accesses() && is_reference_type(type)) {
+       compilation()->set_would_profile(true);
+       load_indexed->set_should_profile(true);
+       load_indexed->set_profiled_method(method());
+       load_indexed->set_profiled_bci(bci());
+     }
+   }
+   result = append(load_indexed);
+   if (need_membar) {
+     append(new MemBar(lir_membar_storestore));
+   }
+   assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
+   if (!array->is_loaded_flattened_array()) {
+     push(as_ValueType(type), result);
+   }
  }
  
  
  void GraphBuilder::store_indexed(BasicType type) {
    // In case of in block code motion in range check elimination
!   ValueStack* state_before = NULL;
+   int array_idx = state()->stack_size() - 3;
+   if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flattened_array()) {
+     // Save the entire state and re-execute on deopt when accessing flattened arrays
+     state_before = copy_state_before();
+     state_before->set_should_reexecute(true);
+   } else {
+     state_before = copy_state_indexed_access();
+   }
    compilation()->set_has_access_indexed(true);
    Value value = pop(as_ValueType(type));
    Value index = ipop();
    Value array = apop();
    Value length = NULL;

*** 1019,75 ***
        value = append(new LogicOp(Bytecodes::_iand, value, mask));
      }
    } else if (type == T_BYTE) {
      check_boolean = true;
    }
-   StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
-   append(result);
-   _memory->store_value(value);
  
!   if (type == T_OBJECT && is_profiling()) {
!     // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
! 
!     if (profile_checkcasts()) {
!       result->set_profiled_method(method());
-       result->set_profiled_bci(bci());
-       result->set_should_profile(true);
-     }
    }
  }
  
- 
  void GraphBuilder::stack_op(Bytecodes::Code code) {
    switch (code) {
      case Bytecodes::_pop:
!       { state()->raw_pop();
        }
        break;
      case Bytecodes::_pop2:
!       { state()->raw_pop();
!         state()->raw_pop();
        }
        break;
      case Bytecodes::_dup:
        { Value w = state()->raw_pop();
          state()->raw_push(w);
          state()->raw_push(w);
        }
        break;
      case Bytecodes::_dup_x1:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          state()->raw_push(w1);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup_x2:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          Value w3 = state()->raw_pop();
          state()->raw_push(w1);
          state()->raw_push(w3);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup2:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          state()->raw_push(w2);
          state()->raw_push(w1);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup2_x1:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          Value w3 = state()->raw_pop();
          state()->raw_push(w2);
          state()->raw_push(w1);
          state()->raw_push(w3);
          state()->raw_push(w2);
          state()->raw_push(w1);
--- 1121,91 ---
        value = append(new LogicOp(Bytecodes::_iand, value, mask));
      }
    } else if (type == T_BYTE) {
      check_boolean = true;
    }
  
!   StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
!   if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flattened_array()) {
      compilation()->set_would_profile(true);
!     store_indexed->set_should_profile(true);
!     store_indexed->set_profiled_method(method());
!     store_indexed->set_profiled_bci(bci());
    }
+   Instruction* result = append(store_indexed);
+   assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
+   _memory->store_value(value);
  }
  
  void GraphBuilder::stack_op(Bytecodes::Code code) {
    switch (code) {
      case Bytecodes::_pop:
!       { Value w = state()->raw_pop();
+         update_larva_stack_count(w);
        }
        break;
      case Bytecodes::_pop2:
!       { Value w1 = state()->raw_pop();
!         Value w2 = state()->raw_pop();
+         update_larva_stack_count(w1);
+         update_larva_stack_count(w2);
        }
        break;
      case Bytecodes::_dup:
        { Value w = state()->raw_pop();
+         update_larval_state(w);
          state()->raw_push(w);
          state()->raw_push(w);
        }
        break;
      case Bytecodes::_dup_x1:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
+         update_larval_state(w1);
          state()->raw_push(w1);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup_x2:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          Value w3 = state()->raw_pop();
+         // special handling for the dup_x2/pop sequence (see JDK-8251046)
+         if (w1 != NULL && w1->as_NewInlineTypeInstance() != NULL) {
+           ciBytecodeStream s(method());
+           s.force_bci(bci());
+           s.next();
+           if (s.cur_bc() != Bytecodes::_pop) {
+             w1->as_NewInlineTypeInstance()->set_not_larva_anymore();
+           } else {
+             w1->as_NewInlineTypeInstance()->increment_on_stack_count();
+           }
+         }
          state()->raw_push(w1);
          state()->raw_push(w3);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup2:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
+         update_larval_state(w1);
+         update_larval_state(w2);
          state()->raw_push(w2);
          state()->raw_push(w1);
          state()->raw_push(w2);
          state()->raw_push(w1);
        }
        break;
      case Bytecodes::_dup2_x1:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          Value w3 = state()->raw_pop();
+         update_larval_state(w1);
+         update_larval_state(w2);
          state()->raw_push(w2);
          state()->raw_push(w1);
          state()->raw_push(w3);
          state()->raw_push(w2);
          state()->raw_push(w1);

*** 1096,10 ***
--- 1214,12 ---
      case Bytecodes::_dup2_x2:
        { Value w1 = state()->raw_pop();
          Value w2 = state()->raw_pop();
          Value w3 = state()->raw_pop();
          Value w4 = state()->raw_pop();
+         update_larval_state(w1);
+         update_larval_state(w2);
          state()->raw_push(w2);
          state()->raw_push(w1);
          state()->raw_push(w4);
          state()->raw_push(w3);
          state()->raw_push(w2);

*** 1223,13 ***
  
  void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
    BlockBegin* tsux = block_at(stream()->get_dest());
    BlockBegin* fsux = block_at(stream()->next_bci());
    bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
    // In case of loop invariant code motion or predicate insertion
    // before the body of a loop the state is needed
!   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : NULL, is_bb));
  
    assert(i->as_Goto() == NULL ||
           (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
           (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
           "safepoint state of Goto returned by canonicalizer incorrect");
--- 1343,40 ---
  
  void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
    BlockBegin* tsux = block_at(stream()->get_dest());
    BlockBegin* fsux = block_at(stream()->next_bci());
    bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
+ 
+   bool subst_check = false;
+   if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
+     ValueType* left_vt = x->type();
+     ValueType* right_vt = y->type();
+     if (left_vt->is_object()) {
+       assert(right_vt->is_object(), "must be");
+       ciKlass* left_klass = x->as_loaded_klass_or_null();
+       ciKlass* right_klass = y->as_loaded_klass_or_null();
+ 
+       if (left_klass == NULL || right_klass == NULL) {
+         // The klass is still unloaded, or came from a Phi node. Go slow case.
+         subst_check = true;
+       } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
+         // Either operand may be a value object, but we're not sure. Go slow case.
+         subst_check = true;
+       } else {
+         // No need to do substitutability check
+       }
+     }
+   }
+   if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
+       is_profiling() && profile_branches()) {
+     compilation()->set_would_profile(true);
+     append(new ProfileACmpTypes(method(), bci(), x, y));
+   }
+ 
    // In case of loop invariant code motion or predicate insertion
    // before the body of a loop the state is needed
!   Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : NULL, is_bb, subst_check));
  
    assert(i->as_Goto() == NULL ||
           (i->as_Goto()->sux_at(0) == tsux  && i->as_Goto()->is_safepoint() == tsux->bci() < stream()->cur_bci()) ||
           (i->as_Goto()->sux_at(0) == fsux  && i->as_Goto()->is_safepoint() == fsux->bci() < stream()->cur_bci()),
           "safepoint state of Goto returned by canonicalizer incorrect");

*** 1476,11 ***
      call_register_finalizer();
    }
  
    // The conditions for a memory barrier are described in Parse::do_exits().
    bool need_mem_bar = false;
!   if (method()->name() == ciSymbols::object_initializer_name() &&
         (scope()->wrote_final() ||
           (AlwaysSafeConstructors && scope()->wrote_fields()) ||
           (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
      need_mem_bar = true;
    }
--- 1623,11 ---
      call_register_finalizer();
    }
  
    // The conditions for a memory barrier are described in Parse::do_exits().
    bool need_mem_bar = false;
!   if ((method()->is_object_constructor() || method()->is_static_init_factory()) &&
         (scope()->wrote_final() ||
           (AlwaysSafeConstructors && scope()->wrote_fields()) ||
           (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
      need_mem_bar = true;
    }

*** 1627,20 ***
      default:
        return new Constant(value);
    }
  }
  
  void GraphBuilder::access_field(Bytecodes::Code code) {
    bool will_link;
    ciField* field = stream()->get_field(will_link);
    ciInstanceKlass* holder = field->holder();
    BasicType field_type = field->type()->basic_type();
    ValueType* type = as_ValueType(field_type);
    // call will_link again to determine if the field is valid.
    const bool needs_patching = !holder->is_loaded() ||
                                !field->will_link(method(), code) ||
!                               PatchALot;
  
    ValueStack* state_before = NULL;
    if (!holder->is_initialized() || needs_patching) {
      // save state before instruction for debug info when
      // deoptimization happens during patching
--- 1774,35 ---
      default:
        return new Constant(value);
    }
  }
  
+ void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
+   assert(vk->nof_nonstatic_fields() > 0, "Empty inline type access should be removed");
+   for (int i = 0; i < vk->nof_nonstatic_fields(); i++) {
+     ciField* inner_field = vk->nonstatic_field_at(i);
+     assert(!inner_field->is_flattened(), "the iteration over nested fields is handled by the loop itself");
+     int off = inner_field->offset() - vk->first_field_offset();
+     LoadField* load = new LoadField(src, src_off + off, inner_field, false, state_before, false);
+     Value replacement = append(load);
+     StoreField* store = new StoreField(dest, dest_off + off, inner_field, replacement, false, state_before, false);
+     store->set_enclosing_field(enclosing_field);
+     append(store);
+   }
+ }
+ 
  void GraphBuilder::access_field(Bytecodes::Code code) {
    bool will_link;
    ciField* field = stream()->get_field(will_link);
    ciInstanceKlass* holder = field->holder();
    BasicType field_type = field->type()->basic_type();
    ValueType* type = as_ValueType(field_type);
+ 
    // call will_link again to determine if the field is valid.
    const bool needs_patching = !holder->is_loaded() ||
                                !field->will_link(method(), code) ||
!                               (!field->is_flattened() && PatchALot);
  
    ValueStack* state_before = NULL;
    if (!holder->is_initialized() || needs_patching) {
      // save state before instruction for debug info when
      // deoptimization happens during patching

*** 1655,72 ***
      } else {
        obj = new Constant(new InstanceConstant(holder->java_mirror()));
      }
    }
  
!   if (field->is_final() && (code == Bytecodes::_putfield)) {
      scope()->set_wrote_final();
    }
  
    if (code == Bytecodes::_putfield) {
      scope()->set_wrote_fields();
      if (field->is_volatile()) {
        scope()->set_wrote_volatile();
      }
    }
  
!   const int offset = !needs_patching ? field->offset() : -1;
    switch (code) {
      case Bytecodes::_getstatic: {
        // check for compile-time constants, i.e., initialized static final fields
        Value constant = NULL;
        if (field->is_static_constant() && !PatchALot) {
          ciConstant field_value = field->constant_value();
          assert(!field->is_stable() || !field_value.is_null_or_zero(),
                 "stable static w/ default value shouldn't be a constant");
          constant = make_constant(field_value, field);
        }
        if (constant != NULL) {
          push(type, append(constant));
        } else {
          if (state_before == NULL) {
            state_before = copy_state_for_exception();
          }
!         push(type, append(new LoadField(append(obj), offset, field, true,
!                                         state_before, needs_patching)));
        }
        break;
      }
      case Bytecodes::_putstatic: {
        Value val = pop(type);
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
!       if (field->type()->basic_type() == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
        append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
        break;
      }
      case Bytecodes::_getfield: {
        // Check for compile-time constants, i.e., trusted final non-static fields.
        Value constant = NULL;
!       obj = apop();
!       ObjectType* obj_type = obj->type()->as_ObjectType();
!       if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
!         ciObject* const_oop = obj_type->constant_value();
!         if (!const_oop->is_null_object() && const_oop->is_loaded()) {
!           ciConstant field_value = field->constant_value_of(const_oop);
!           if (field_value.is_valid()) {
!             constant = make_constant(field_value, field);
!             // For CallSite objects add a dependency for invalidation of the optimization.
!             if (field->is_call_site_target()) {
!               ciCallSite* call_site = const_oop->as_call_site();
!               if (!call_site->is_fully_initialized_constant_call_site()) {
!                 ciMethodHandle* target = field_value.as_object()->as_method_handle();
!                 dependency_recorder()->assert_call_site_target_value(call_site, target);
                }
              }
            }
          }
        }
--- 1817,97 ---
      } else {
        obj = new Constant(new InstanceConstant(holder->java_mirror()));
      }
    }
  
!   if (field->is_final() && code == Bytecodes::_putfield) {
      scope()->set_wrote_final();
    }
  
    if (code == Bytecodes::_putfield) {
      scope()->set_wrote_fields();
      if (field->is_volatile()) {
        scope()->set_wrote_volatile();
      }
    }
  
!   int offset = !needs_patching ? field->offset() : -1;
    switch (code) {
      case Bytecodes::_getstatic: {
        // check for compile-time constants, i.e., initialized static final fields
        Value constant = NULL;
        if (field->is_static_constant() && !PatchALot) {
          ciConstant field_value = field->constant_value();
          assert(!field->is_stable() || !field_value.is_null_or_zero(),
                 "stable static w/ default value shouldn't be a constant");
          constant = make_constant(field_value, field);
+       } else if (field->is_null_free() && field->type()->as_instance_klass()->is_initialized() &&
+                  field->type()->as_inline_klass()->is_empty()) {
+         // Loading from a field of an empty inline type. Just return the default instance.
+         constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
        }
        if (constant != NULL) {
          push(type, append(constant));
        } else {
          if (state_before == NULL) {
            state_before = copy_state_for_exception();
          }
!         LoadField* load_field = new LoadField(append(obj), offset, field, true,
!                                         state_before, needs_patching);
+         push(type, append(load_field));
        }
        break;
      }
      case Bytecodes::_putstatic: {
        Value val = pop(type);
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
!       if (field_type == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
+       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
+         // Storing to a field of an empty inline type. Ignore.
+         break;
+       }
        append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
        break;
      }
      case Bytecodes::_getfield: {
        // Check for compile-time constants, i.e., trusted final non-static fields.
        Value constant = NULL;
!       if (state_before == NULL && field->is_flattened()) {
!         // Save the entire state and re-execute on deopt when accessing flattened fields
!         assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
!         state_before = copy_state_before();
!       }
!       if (!has_pending_field_access() && !has_pending_load_indexed()) {
!         obj = apop();
!         ObjectType* obj_type = obj->type()->as_ObjectType();
!         if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
!           // Loading from a field of an empty inline type. Just return the default instance.
!           null_check(obj);
!           constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
!         } else if (field->is_constant() && !field->is_flattened() && obj_type->is_constant() && !PatchALot) {
!           ciObject* const_oop = obj_type->constant_value();
+           if (!const_oop->is_null_object() && const_oop->is_loaded()) {
+             ciConstant field_value = field->constant_value_of(const_oop);
+             if (field_value.is_valid()) {
+               if (field->is_null_free() && field_value.is_null_or_zero()) {
+                 // Non-flattened inline type field. Replace null with the default value.
+                 constant = new Constant(new InstanceConstant(field->type()->as_inline_klass()->default_instance()));
+               } else {
+                 constant = make_constant(field_value, field);
+               }
+               // For CallSite objects add a dependency for invalidation of the optimization.
+               if (field->is_call_site_target()) {
+                 ciCallSite* call_site = const_oop->as_call_site();
+                 if (!call_site->is_fully_initialized_constant_call_site()) {
+                   ciMethodHandle* target = field_value.as_object()->as_method_handle();
+                   dependency_recorder()->assert_call_site_target_value(call_site, target);
+                 }
                }
              }
            }
          }
        }

*** 1728,61 ***
          push(type, append(constant));
        } else {
          if (state_before == NULL) {
            state_before = copy_state_for_exception();
          }
!         LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
!         Value replacement = !needs_patching ? _memory->load(load) : load;
!         if (replacement != load) {
!           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
!           // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
!           // conversion. Emit an explicit conversion here to get the correct field value after the write.
!           BasicType bt = field->type()->basic_type();
!           switch (bt) {
!           case T_BOOLEAN:
!           case T_BYTE:
!             replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
!             break;
!           case T_CHAR:
!             replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
!             break;
-           case T_SHORT:
-             replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
-             break;
-           default:
              break;
            }
!           push(type, replacement);
          } else {
!           push(type, append(load));
          }
        }
        break;
      }
      case Bytecodes::_putfield: {
        Value val = pop(type);
        obj = apop();
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
!       if (field->type()->basic_type() == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
!       StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
!       if (!needs_patching) store = _memory->store(store);
!       if (store != NULL) {
!         append(store);
        }
        break;
      }
      default:
        ShouldNotReachHere();
        break;
    }
  }
  
  
  Dependencies* GraphBuilder::dependency_recorder() const {
    assert(DeoptC1, "need debug information");
    return compilation()->dependency_recorder();
  }
--- 1915,227 ---
          push(type, append(constant));
        } else {
          if (state_before == NULL) {
            state_before = copy_state_for_exception();
          }
!         if (!field->is_flattened()) {
!           if (has_pending_field_access()) {
!             assert(!needs_patching, "Can't patch delayed field access");
!             obj = pending_field_access()->obj();
!             offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->first_field_offset();
!             field = pending_field_access()->holder()->get_field_by_offset(offset, false);
!             assert(field != NULL, "field not found");
!             set_pending_field_access(NULL);
!           } else if (has_pending_load_indexed()) {
!             assert(!needs_patching, "Can't patch delayed field access");
!             pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
!             LoadIndexed* li = pending_load_indexed()->load_instr();
!             li->set_type(type);
!             push(type, append(li));
!             set_pending_load_indexed(NULL);
              break;
            }
!           LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
+           Value replacement = !needs_patching ? _memory->load(load) : load;
+           if (replacement != load) {
+             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
+             // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
+             // conversion. Emit an explicit conversion here to get the correct field value after the write.
+             switch (field_type) {
+             case T_BOOLEAN:
+             case T_BYTE:
+               replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
+               break;
+             case T_CHAR:
+               replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
+               break;
+             case T_SHORT:
+               replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
+               break;
+             default:
+               break;
+             }
+             push(type, replacement);
+           } else {
+             push(type, append(load));
+           }
          } else {
!           // Look at the next bytecode to check if we can delay the field access
+           bool can_delay_access = false;
+           ciBytecodeStream s(method());
+           s.force_bci(bci());
+           s.next();
+           if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
+             ciField* next_field = s.get_field(will_link);
+             bool next_needs_patching = !next_field->holder()->is_loaded() ||
+                                        !next_field->will_link(method(), Bytecodes::_getfield) ||
+                                        PatchALot;
+             can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
+           }
+           if (can_delay_access) {
+             if (has_pending_load_indexed()) {
+               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
+             } else if (has_pending_field_access()) {
+               pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->first_field_offset());
+             } else {
+               null_check(obj);
+               DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset());
+               set_pending_field_access(dfa);
+             }
+           } else {
+             ciInlineKlass* inline_klass = field->type()->as_inline_klass();
+             scope()->set_wrote_final();
+             scope()->set_wrote_fields();
+             bool need_membar = false;
+             if (inline_klass->is_empty()) {
+               apush(append(new Constant(new InstanceConstant(inline_klass->default_instance()))));
+               if (has_pending_field_access()) {
+                 set_pending_field_access(NULL);
+               } else if (has_pending_load_indexed()) {
+                 set_pending_load_indexed(NULL);
+               }
+             } else if (has_pending_load_indexed()) {
+               assert(!needs_patching, "Can't patch delayed field access");
+               pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->first_field_offset());
+               NewInlineTypeInstance* vt = new NewInlineTypeInstance(inline_klass, pending_load_indexed()->state_before());
+               _memory->new_instance(vt);
+               pending_load_indexed()->load_instr()->set_vt(vt);
+               apush(append_split(vt));
+               append(pending_load_indexed()->load_instr());
+               set_pending_load_indexed(NULL);
+               need_membar = true;
+             } else {
+               NewInlineTypeInstance* new_instance = new NewInlineTypeInstance(inline_klass, state_before);
+               _memory->new_instance(new_instance);
+               apush(append_split(new_instance));
+               assert(!needs_patching, "Can't patch flattened inline type field access");
+               if (has_pending_field_access()) {
+                 copy_inline_content(inline_klass, pending_field_access()->obj(),
+                                     pending_field_access()->offset() + field->offset() - field->holder()->as_inline_klass()->first_field_offset(),
+                                     new_instance, inline_klass->first_field_offset(), state_before);
+                 set_pending_field_access(NULL);
+               } else {
+                 copy_inline_content(inline_klass, obj, field->offset(), new_instance, inline_klass->first_field_offset(), state_before);
+               }
+               need_membar = true;
+             }
+             if (need_membar) {
+               // If we allocated a new instance ensure the stores to copy the
+               // field contents are visible before any subsequent store that
+               // publishes this reference.
+               append(new MemBar(lir_membar_storestore));
+             }
+           }
          }
        }
        break;
      }
      case Bytecodes::_putfield: {
        Value val = pop(type);
        obj = apop();
        if (state_before == NULL) {
          state_before = copy_state_for_exception();
        }
!       if (field_type == T_BOOLEAN) {
          Value mask = append(new Constant(new IntConstant(1)));
          val = append(new LogicOp(Bytecodes::_iand, val, mask));
        }
!       if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty()) {
!         // Storing to a field of an empty inline type. Ignore.
!         null_check(obj);
!       } else if (!field->is_flattened()) {
+         StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
+         if (!needs_patching) store = _memory->store(store);
+         if (store != NULL) {
+           append(store);
+         }
+       } else {
+         assert(!needs_patching, "Can't patch flattened inline type field access");
+         ciInlineKlass* inline_klass = field->type()->as_inline_klass();
+         copy_inline_content(inline_klass, val, inline_klass->first_field_offset(), obj, offset, state_before, field);
        }
        break;
      }
      default:
        ShouldNotReachHere();
        break;
    }
  }
  
+ // Baseline version of withfield, allocate every time.
+ // Operand stack: ..., obj, val -> ..., new instance. Produces an inline-type
+ // instance equal to obj except for the modified field. The receiver is reused
+ // only when it is still a larval NewInlineTypeInstance; otherwise a fresh
+ // instance is allocated and every unmodified field is copied over.
+ void GraphBuilder::withfield(int field_index) {
+   // Save the entire state and re-execute on deopt
+   ValueStack* state_before = copy_state_before();
+   state_before->set_should_reexecute(true);
+ 
+   bool will_link;
+   ciField* field_modify = stream()->get_field(will_link);
+   ciInstanceKlass* holder = field_modify->holder();
+   BasicType field_type = field_modify->type()->basic_type();
+   ValueType* type = as_ValueType(field_type);
+   Value val = pop(type);
+   Value obj = apop();
+ 
+   // Holder unresolved or not an inline type: cannot build the graph here,
+   // emit a Deoptimize and bail out.
+   if (!holder->is_loaded() || !holder->is_inlinetype()) {
+     apush(append_split(new Deoptimize(holder, state_before)));
+     return;
+   }
+ 
+   // call will_link again to determine if the field is valid.
+   const bool needs_patching = !field_modify->will_link(method(), Bytecodes::_withfield) ||
+                               (!field_modify->is_flattened() && PatchALot);
+   // Offset is unknown (-1) until the field access is patched at runtime.
+   const int offset_modify = !needs_patching ? field_modify->offset() : -1;
+ 
+   scope()->set_wrote_final();
+   scope()->set_wrote_fields();
+ 
+   NewInlineTypeInstance* new_instance;
+   if (obj->as_NewInlineTypeInstance() != NULL && obj->as_NewInlineTypeInstance()->in_larval_state()) {
+     // Receiver is a larval (still-being-initialized) instance: reuse the
+     // existing allocation instead of making a copy.
+     new_instance = obj->as_NewInlineTypeInstance();
+     apush(append_split(new_instance));
+   } else {
+     new_instance = new NewInlineTypeInstance(holder->as_inline_klass(), state_before);
+     _memory->new_instance(new_instance);
+     apush(append_split(new_instance));
+ 
+     // Initialize fields which are not modified
+     for (int i = 0; i < holder->nof_nonstatic_fields(); i++) {
+       ciField* field = holder->nonstatic_field_at(i);
+       int offset = field->offset();
+       // Don't use offset_modify here, it might be set to -1 if needs_patching
+       if (offset != field_modify->offset()) {
+         if (field->is_flattened()) {
+           // Flattened field: copy its inline-type content into the new instance.
+           ciInlineKlass* vk = field->type()->as_inline_klass();
+           if (!vk->is_empty()) {
+             copy_inline_content(vk, obj, offset, new_instance, vk->first_field_offset(), state_before, field);
+           }
+         } else {
+           // Non-flattened field: plain load from obj, store into new_instance.
+           LoadField* load = new LoadField(obj, offset, field, false, state_before, false);
+           Value replacement = append(load);
+           StoreField* store = new StoreField(new_instance, offset, field, replacement, false, state_before, false);
+           append(store);
+         }
+       }
+     }
+   }
+ 
+   // Field to modify
+   if (field_type == T_BOOLEAN) {
+     // Booleans carry only their lowest bit; mask the incoming int value.
+     Value mask = append(new Constant(new IntConstant(1)));
+     val = append(new LogicOp(Bytecodes::_iand, val, mask));
+   }
+   if (field_modify->is_flattened()) {
+     assert(!needs_patching, "Can't patch flattened inline type field access");
+     ciInlineKlass* vk = field_modify->type()->as_inline_klass();
+     if (!vk->is_empty()) {
+       copy_inline_content(vk, val, vk->first_field_offset(), new_instance, offset_modify, state_before, field_modify);
+     }
+   } else {
+     StoreField* store = new StoreField(new_instance, offset_modify, field_modify, val, false, state_before, needs_patching);
+     append(store);
+   }
+ }
  
  // Returns the compilation-wide Dependencies recorder; only valid when C1
  // deoptimization support (DeoptC1) is enabled, as the assert enforces.
  Dependencies* GraphBuilder::dependency_recorder() const {
    assert(DeoptC1, "need debug information");
    return compilation()->dependency_recorder();
  }

*** 1864,11 ***
        log->elem("call method='%d' instr='%s'",
                  log->identify(target),
                  Bytecodes::name(code));
  
    // invoke-special-super
!   if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer()) {
      ciInstanceKlass* sender_klass = calling_klass;
      if (sender_klass->is_interface()) {
        int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
        Value receiver = state()->stack_at(index);
        CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());
--- 2217,11 ---
        log->elem("call method='%d' instr='%s'",
                  log->identify(target),
                  Bytecodes::name(code));
  
    // invoke-special-super
!   if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor()) {
      ciInstanceKlass* sender_klass = calling_klass;
      if (sender_klass->is_interface()) {
        int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
        Value receiver = state()->stack_at(index);
        CheckCast* c = new CheckCast(sender_klass, receiver, copy_state_before());

*** 2114,11 ***
          profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
        }
      }
    }
  
!   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
    // push result
    append_split(result);
  
    if (result_type != voidType) {
      push(result_type, round_fp(result));
--- 2467,12 ---
          profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
        }
      }
    }
  
!   Invoke* result = new Invoke(code, result_type, recv, args, target, state_before,
+                               declared_signature->returns_null_free_inline_type());
    // push result
    append_split(result);
  
    if (result_type != voidType) {
      push(result_type, round_fp(result));

*** 2137,22 ***
    NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
    _memory->new_instance(new_instance);
    apush(append_split(new_instance));
  }
  
  
  void GraphBuilder::new_type_array() {
    ValueStack* state_before = copy_state_exhandling();
    apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
  }
  
  
  void GraphBuilder::new_object_array() {
    bool will_link;
    ciKlass* klass = stream()->get_klass(will_link);
    ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
!   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
    apush(append_split(n));
  }
  
  
  bool GraphBuilder::direct_compare(ciKlass* k) {
--- 2491,34 ---
    NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
    _memory->new_instance(new_instance);
    apush(append_split(new_instance));
  }
  
+ // defaultvalue bytecode: push the default instance of an inline klass.
+ // Folds to an InstanceConstant when the klass is resolved, is an inline type,
+ // and is initialized; otherwise emits a Deoptimize so the slow path handles it.
+ void GraphBuilder::default_value(int klass_index) {
+   bool will_link;
+   ciKlass* klass = stream()->get_klass(will_link);
+   if (!stream()->is_unresolved_klass() && klass->is_inlinetype() &&
+       klass->as_inline_klass()->is_initialized()) {
+     ciInlineKlass* vk = klass->as_inline_klass();
+     apush(append(new Constant(new InstanceConstant(vk->default_instance()))));
+   } else {
+     apush(append_split(new Deoptimize(klass, copy_state_before())));
+   }
+ }
  
  // newarray bytecode: pop the length (ipop) and allocate a primitive-type
  // array whose element type comes from the bytecode stream index.
  void GraphBuilder::new_type_array() {
    ValueStack* state_before = copy_state_exhandling();
    apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
  }
  
  
  // anewarray bytecode: pop the length and allocate an object array.
  // A Q-type element signature marks the array null-free (Valhalla inline
  // types); the flag is passed through to NewObjectArray.
  void GraphBuilder::new_object_array() {
    bool will_link;
    ciKlass* klass = stream()->get_klass(will_link);
+   bool null_free = stream()->has_Q_signature();
    ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
!   NewArray* n = new NewObjectArray(klass, ipop(), state_before, null_free);
    apush(append_split(n));
  }
  
  
  bool GraphBuilder::direct_compare(ciKlass* k) {

*** 2173,12 ***
  
  
  void GraphBuilder::check_cast(int klass_index) {
    bool will_link;
    ciKlass* klass = stream()->get_klass(will_link);
    ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
!   CheckCast* c = new CheckCast(klass, apop(), state_before);
    apush(append_split(c));
    c->set_direct_compare(direct_compare(klass));
  
    if (is_profiling()) {
      // Note that we'd collect profile data in this method if we wanted it.
--- 2539,13 ---
  
  
  void GraphBuilder::check_cast(int klass_index) {
    bool will_link;
    ciKlass* klass = stream()->get_klass(will_link);
+   bool null_free = stream()->has_Q_signature();
    ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
!   CheckCast* c = new CheckCast(klass, apop(), state_before, null_free);
    apush(append_split(c));
    c->set_direct_compare(direct_compare(klass));
  
    if (is_profiling()) {
      // Note that we'd collect profile data in this method if we wanted it.

*** 2213,13 ***
    }
  }
  
  
  void GraphBuilder::monitorenter(Value x, int bci) {
    // save state before locking in case of deoptimization after a NullPointerException
    ValueStack* state_before = copy_state_for_exception_with_bci(bci);
!   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
    kill_all();
  }
  
  
  void GraphBuilder::monitorexit(Value x, int bci) {
--- 2580,32 ---
    }
  }
  
  
  // Emit a MonitorEnter for x at the given bci. With Valhalla enabled, a
  // monitorenter bytecode on an object that might be an inline type is flagged
  // (maybe_inlinetype) so the runtime check can throw IMSE instead of locking.
  void GraphBuilder::monitorenter(Value x, int bci) {
+   bool maybe_inlinetype = false;
+   if (bci == InvocationEntryBci) {
+     // Called by GraphBuilder::inline_sync_entry; inline types can never have
+     // synchronized methods, so no runtime check is needed on this path.
+ #ifdef ASSERT
+     ciType* obj_type = x->declared_type();
+     assert(obj_type == NULL || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
+ #endif
+   } else {
+     // We are compiling a monitorenter bytecode
+     if (EnableValhalla) {
+       ciType* obj_type = x->declared_type();
+       if (obj_type == NULL || obj_type->as_klass()->can_be_inline_klass()) {
+         // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
+         // and throw IMSE. (obj_type is null for Phi nodes, so let's just be conservative).
+         maybe_inlinetype = true;
+       }
+     }
+   }
+ 
    // save state before locking in case of deoptimization after a NullPointerException
    ValueStack* state_before = copy_state_for_exception_with_bci(bci);
!   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
    kill_all();
  }
  
  
  void GraphBuilder::monitorexit(Value x, int bci) {

*** 2389,11 ***
  
    assert(cur_state != NULL, "state_before must be set");
    do {
      int cur_bci = cur_state->bci();
      assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
!     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
  
      // join with all potential exception handlers
      XHandlers* list = cur_scope_data->xhandlers();
      const int n = list->length();
      for (int i = 0; i < n; i++) {
--- 2775,13 ---
  
    assert(cur_state != NULL, "state_before must be set");
    do {
      int cur_bci = cur_state->bci();
      assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
!     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
+            || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
+ 
  
      // join with all potential exception handlers
      XHandlers* list = cur_scope_data->xhandlers();
      const int n = list->length();
      for (int i = 0; i < n; i++) {

*** 2877,10 ***
--- 3265,12 ---
        case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
        case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
        case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
        case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
        case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
+       case Bytecodes::_defaultvalue   : default_value(s.get_index_u2()); break;
+       case Bytecodes::_withfield      : withfield(s.get_index_u2()); break;
        case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
        default                         : ShouldNotReachHere(); break;
      }
  
      if (log != NULL)

*** 3165,11 ***
  
    // Set up locals for receiver
    int idx = 0;
    if (!method()->is_static()) {
      // we should always see the receiver
!     state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
      idx = 1;
    }
  
    // Set up locals for incoming arguments
    ciSignature* sig = method()->signature();
--- 3555,12 ---
  
    // Set up locals for receiver
    int idx = 0;
    if (!method()->is_static()) {
      // we should always see the receiver
!     state->store_local(idx, new Local(method()->holder(), objectType, idx,
+              /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
      idx = 1;
    }
  
    // Set up locals for incoming arguments
    ciSignature* sig = method()->signature();

*** 3177,11 ***
      ciType* type = sig->type_at(i);
      BasicType basic_type = type->basic_type();
      // don't allow T_ARRAY to propagate into locals types
      if (is_reference_type(basic_type)) basic_type = T_OBJECT;
      ValueType* vt = as_ValueType(basic_type);
!     state->store_local(idx, new Local(type, vt, idx, false));
      idx += type->size();
    }
  
    // lock synchronized method
    if (method()->is_synchronized()) {
--- 3568,11 ---
      ciType* type = sig->type_at(i);
      BasicType basic_type = type->basic_type();
      // don't allow T_ARRAY to propagate into locals types
      if (is_reference_type(basic_type)) basic_type = T_OBJECT;
      ValueType* vt = as_ValueType(basic_type);
!     state->store_local(idx, new Local(type, vt, idx, false, sig->is_null_free_at(i)));
      idx += type->size();
    }
  
    // lock synchronized method
    if (method()->is_synchronized()) {

*** 3197,10 ***
--- 3588,12 ---
    , _compilation(compilation)
    , _memory(new MemoryBuffer())
    , _inline_bailout_msg(NULL)
    , _instruction_count(0)
    , _osr_entry(NULL)
+   , _pending_field_access(NULL)
+   , _pending_load_indexed(NULL)
  {
    int osr_bci = compilation->osr_bci();
  
    // determine entry points and bci2block mapping
    BlockListBuilder blm(compilation, scope, osr_bci);
< prev index next >