src/hotspot/share/c1/c1_LIRGenerator.hpp

@@ -167,10 +167,11 @@
    BlockBegin*   _block;
    int           _virtual_register_number;
    Values        _instruction_for_operand;
    BitMap2D      _vreg_flags; // flags which can be set on a per-vreg basis
    LIR_List*     _lir;
+   bool          _in_conditional_code;
  
    LIRGenerator* gen() {
      return this;
    }
  

@@ -193,10 +194,11 @@
    LIR_OprList                     _reg_for_constants;
    Values                          _unpinned_constants;
  
    friend class PhiResolver;
  
+   void set_in_conditional_code(bool v);
   public:
    // unified bailout support
    void bailout(const char* msg) const            { compilation()->bailout(msg); }
    bool bailed_out() const                        { return compilation()->bailed_out(); }
  

@@ -212,10 +214,11 @@
  
    // get a constant into a register and keep track of what register was used
    LIR_Opr load_constant(Constant* x);
    LIR_Opr load_constant(LIR_Const* constant);
  
+   bool in_conditional_code() { return _in_conditional_code; }
    // Given an immediate value, return an operand usable in logical ops.
    LIR_Opr load_immediate(jlong x, BasicType type);
  
    void  set_result(Value x, LIR_Opr opr)           {
      assert(opr->is_valid(), "must set to valid value");

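The three additions above work together: the _in_conditional_code field, the private set_in_conditional_code() setter, and the public in_conditional_code() accessor let the generator record that it is currently emitting LIR on a conditionally executed path. Below is a minimal sketch of how such a flag is typically saved and restored around a branch body; the member function name is hypothetical and the real call sites are not part of this header diff.

   // Illustrative only: toggle the flag around a conditionally executed
   // region and restore the previous value at the join point.
   void LIRGenerator::emit_conditional_region_sketch() {
     bool saved = in_conditional_code();
     set_in_conditional_code(true);
     // ... emit LIR for the branch body; helpers can query
     // in_conditional_code() while it is set ...
     set_in_conditional_code(saved);
   }
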
@@ -271,10 +274,23 @@
    void do_update_CRC32(Intrinsic* x);
    void do_update_CRC32C(Intrinsic* x);
    void do_vectorizedMismatch(Intrinsic* x);
    void do_blackhole(Intrinsic* x);
  
+   void access_flat_array(bool is_load, LIRItem& array, LIRItem& index, LIRItem& obj_item, ciField* field = nullptr, int offset = 0);
+   void access_sub_element(LIRItem& array, LIRItem& index, LIR_Opr& result, ciField* field, int sub_offset);
+   LIR_Opr get_and_load_element_address(LIRItem& array, LIRItem& index);
+   bool needs_flat_array_store_check(StoreIndexed* x);
+   void check_flat_array(LIR_Opr array, LIR_Opr value, CodeStub* slow_path);
+   bool needs_null_free_array_store_check(StoreIndexed* x);
+   void check_null_free_array(LIRItem& array, LIRItem& value, CodeEmitInfo* info);
+   void substitutability_check(IfOp* x, LIRItem& left, LIRItem& right, LIRItem& t_val, LIRItem& f_val);
+   void substitutability_check(If* x, LIRItem& left, LIRItem& right);
+   void substitutability_check_common(Value left_val, Value right_val, LIRItem& left, LIRItem& right,
+                                      LIR_Opr equal_result, LIR_Opr not_equal_result, LIR_Opr result, CodeEmitInfo* info);
+   void init_temps_for_substitutability_check(LIR_Opr& tmp1, LIR_Opr& tmp2);
+ 
   public:
    LIR_Opr call_runtime(BasicTypeArray* signature, LIRItemList* args, address entry, ValueType* result_type, CodeEmitInfo* info);
    LIR_Opr call_runtime(BasicTypeArray* signature, LIR_OprList* args, address entry, ValueType* result_type, CodeEmitInfo* info);
  
    // convenience functions

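The block above adds the helpers for flat (inline-type) array accesses, the flat/null-free array store checks, and the acmp substitutability checks. A hedged sketch of how an indexed store might consult them follows; the function is illustrative only, not the body introduced by this change, and slow-path stub construction is elided.

   // Illustrative only.
   void LIRGenerator::store_indexed_sketch(StoreIndexed* x) {
     LIRItem array(x->array(), this);
     LIRItem index(x->index(), this);
     LIRItem value(x->value(), this);
     array.load_item();
     index.load_item();
     value.load_item();

     if (needs_null_free_array_store_check(x)) {
       // The destination may be a null-restricted array: reject null elements.
       check_null_free_array(array, value, state_for(x));
     }
     if (needs_flat_array_store_check(x)) {
       // The destination may turn out to be flat at runtime: check_flat_array()
       // branches to a slow-path stub when it is (stub construction elided).
     }
     // For a statically known flat array the element payload is copied field
     // by field rather than stored as a reference.
     access_flat_array(false /* is_load */, array, index, value);
   }
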
@@ -324,11 +340,11 @@
  
    ciObject* get_jobject_constant(Value value);
  
    LIRItemList* invoke_visit_arguments(Invoke* x);
    void invoke_load_arguments(Invoke* x, LIRItemList* args, const LIR_OprList* arg_list);
- 
+   void invoke_load_one_argument(LIRItem* param, LIR_Opr loc);
    void trace_block_entry(BlockBegin* block);
  
    // volatile field operations are never patchable because a klass
    // must be loaded to know it's volatile which means that the offset
    // is always known as well.

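The new invoke_load_one_argument() factors the per-argument move out of invoke_load_arguments(). A hedged guess at the resulting loop shape; the actual body is not part of this header diff and the sketch name is hypothetical.

   // Illustrative only: one call of the new helper per argument/location pair.
   void LIRGenerator::invoke_load_arguments_sketch(LIRItemList* args,
                                                   const LIR_OprList* arg_list) {
     for (int i = 0; i < args->length(); i++) {
       invoke_load_one_argument(args->at(i), arg_list->at(i));
     }
   }
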
@@ -360,14 +376,14 @@
  
    void shift_op   (Bytecodes::Code code, LIR_Opr dst_reg, LIR_Opr value, LIR_Opr count, LIR_Opr tmp);
  
    void logic_op   (Bytecodes::Code code, LIR_Opr dst_reg, LIR_Opr left, LIR_Opr right);
  
-   void monitor_enter (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info);
+   void monitor_enter (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no, CodeEmitInfo* info_for_exception, CodeEmitInfo* info, CodeStub* throw_imse_stub);
    void monitor_exit  (LIR_Opr object, LIR_Opr lock, LIR_Opr hdr, LIR_Opr scratch, int monitor_no);
  
-   void new_instance    (LIR_Opr  dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr  scratch1, LIR_Opr  scratch2, LIR_Opr  scratch3,  LIR_Opr scratch4, LIR_Opr  klass_reg, CodeEmitInfo* info);
+   void new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, bool allow_inline, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info);
  
    // machine dependent
    void cmp_mem_int(LIR_Condition condition, LIR_Opr base, int disp, int c, CodeEmitInfo* info);
    void cmp_reg_mem(LIR_Condition condition, LIR_Opr reg, LIR_Opr base, int disp, BasicType type, CodeEmitInfo* info);
  

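monitor_enter() now takes an extra CodeStub* throw_imse_stub, presumably branched to when the locked object turns out to be a value object (which has no monitor) so that IllegalMonitorStateException can be raised, and new_instance() gains an allow_inline flag. A hedged sketch of a caller passing the new argument; hdr/scratch operands and stub construction are platform dependent and elided, and the function name is illustrative.

   // Illustrative only; not the body introduced by this change.
   void LIRGenerator::monitor_enter_sketch(MonitorEnter* x) {
     LIRItem obj(x->obj(), this);
     obj.load_item();
     set_no_result(x);

     LIR_Opr lock = new_register(T_INT);
     CodeEmitInfo* info_for_exception = x->needs_null_check() ? state_for(x) : nullptr;
     CodeEmitInfo* info = state_for(x, x->state_before());

     CodeStub* throw_imse_stub = nullptr;  // created only when the object may be a value object
     monitor_enter(obj.result(), lock, LIR_OprFact::illegalOpr, LIR_OprFact::illegalOpr,
                   x->monitor_no(), info_for_exception, info, throw_imse_stub);
   }
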
@@ -475,10 +491,15 @@
                          Value arg, LIR_Opr& mdp, bool not_null, ciKlass* signature_at_call_k,
                          ciKlass* callee_signature_k);
    void profile_arguments(ProfileCall* x);
    void profile_parameters(Base* x);
    void profile_parameters_at_call(ProfileCall* x);
+   void profile_flags(ciMethodData* md, ciProfileData* load_store, int flag, LIR_Condition condition = lir_cond_always);
+   template <class ArrayData> void profile_null_free_array(LIRItem array, ciMethodData* md, ArrayData* load_store);
+   template <class ArrayData> void profile_array_type(AccessIndexed* x, ciMethodData*& md, ArrayData*& load_store);
+   void profile_element_type(Value element, ciMethodData* md, ciArrayLoadData* load_store);
+   bool profile_inline_klass(ciMethodData* md, ciProfileData* data, Value value, int flag);
    LIR_Opr mask_boolean(LIR_Opr array, LIR_Opr value, CodeEmitInfo*& null_check_info);
  
   public:
    Compilation*  compilation() const              { return _compilation; }
    FrameMap*     frame_map() const                { return _compilation->frame_map(); }

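The profiling additions above cover array-type, null-free-array, element-type and inline-klass profiling, plus a generic profile_flags() update. A hedged sketch of how the templated helpers might be combined on an array load; the wrapper name is hypothetical, and ciArrayLoadData is the only profile-data type this diff names.

   // Illustrative only.
   void LIRGenerator::profile_array_load_sketch(LoadIndexed* x, LIRItem& array) {
     ciMethodData* md = nullptr;
     ciArrayLoadData* load_data = nullptr;
     // Resolve the MDO cell for this bytecode; the template parameter selects
     // the load or store flavor of the array profile data.
     profile_array_type(x, md, load_data);
     if (md != nullptr && load_data != nullptr) {
       // Record whether the array seen here was null-free.
       profile_null_free_array(array, md, load_data);
     }
   }
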
@@ -502,10 +523,11 @@
    LIRGenerator(Compilation* compilation, ciMethod* method)
      : _compilation(compilation)
      , _method(method)
      , _virtual_register_number(LIR_Opr::vreg_base)
      , _vreg_flags(num_vreg_flags)
+     , _in_conditional_code(false)
      , _barrier_set(BarrierSet::barrier_set()->barrier_set_c1()) {
    }
  
    // for virtual registers, maps them back to Phi's or Local's
    Instruction* instruction_for_opr(LIR_Opr opr);

@@ -580,10 +602,11 @@
    virtual void do_UnsafePut      (UnsafePut*       x);
    virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x);
    virtual void do_ProfileCall    (ProfileCall*     x);
    virtual void do_ProfileReturnType (ProfileReturnType* x);
    virtual void do_ProfileInvoke  (ProfileInvoke*   x);
+   virtual void do_ProfileACmpTypes(ProfileACmpTypes* x);
    virtual void do_RuntimeCall    (RuntimeCall*     x);
    virtual void do_MemBar         (MemBar*          x);
    virtual void do_RangeCheckPredicate(RangeCheckPredicate* x);
  #ifdef ASSERT
    virtual void do_Assert         (Assert*          x);