src/hotspot/share/c1/c1_GraphBuilder.hpp

*** 33,10 ***
--- 33,25 ---
  #include "ci/ciStreams.hpp"
  #include "compiler/compileLog.hpp"
  
  class MemoryBuffer;
  
+ class DelayedFieldAccess : public CompilationResourceObj {
+ private:
+   Value            _obj;
+   ciInstanceKlass* _holder;
+   int              _offset;
+ public:
+   DelayedFieldAccess(Value obj, ciInstanceKlass* holder, int offset)
+   : _obj(obj), _holder(holder), _offset(offset) { }
+ 
+   Value obj() const               { return _obj; }
+   ciInstanceKlass* holder() const { return _holder; }
+   int offset() const              { return _offset; }
+   void inc_offset(int offset)     { _offset += offset; }
+ };
+ 
  class GraphBuilder {
   private:
    // Per-scope data. These are pushed and popped as we descend into
    // inlined methods. Currently in order to generate good code in the
    // inliner we have to attempt to inline methods directly into the

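The DelayedFieldAccess record added above lets the graph builder postpone a load of a flattened (inline-type) field: instead of materializing an intermediate value for each getfield, it remembers the base object, the holder klass, and a running offset that grows via inc_offset() as further flattened fields are accessed. The stand-alone sketch below is illustrative only (Value, InstanceKlass, and all offsets are stand-ins invented for the example, not HotSpot types); it shows how a chain such as a.line.start.x could be folded into a single load from the original base at the accumulated offset.

    // Illustrative sketch only -- not HotSpot code. Value, InstanceKlass and the
    // offsets below are invented stand-ins; the real builder works on C1 IR values.
    #include <cstdio>

    struct Value         { const char* name; };   // stand-in for a C1 IR value
    struct InstanceKlass { const char* name; };   // stand-in for ciInstanceKlass

    // Mirrors the DelayedFieldAccess idea: remember the base object and the offset
    // accumulated so far while successive getfields on flattened fields are parsed.
    class DelayedFieldAccess {
      Value*         _obj;
      InstanceKlass* _holder;
      int            _offset;
     public:
      DelayedFieldAccess(Value* obj, InstanceKlass* holder, int offset)
      : _obj(obj), _holder(holder), _offset(offset) { }

      Value*         obj() const    { return _obj; }
      InstanceKlass* holder() const { return _holder; }
      int            offset() const { return _offset; }
      void inc_offset(int offset)   { _offset += offset; }
    };

    int main() {
      Value a{"a"};
      InstanceKlass point{"Point"};

      // getfield a.line (flattened): no load is emitted yet, only base + offset is recorded.
      DelayedFieldAccess pending(&a, &point, /* offset of 'line' in 'a' (made up) */ 16);

      // getfield line.start (also flattened): fold into the pending access.
      pending.inc_offset(/* offset of 'start' in 'line' (made up) */ 8);

      // getfield start.x (a non-flattened leaf): one load from the original base
      // at the combined offset replaces the whole chain.
      std::printf("load %s + %d\n", pending.obj()->name, pending.offset() + /* 'x' */ 0);
      return 0;
    }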
*** 189,10 ***
--- 204,14 ---
    BlockBegin*       _block;                      // the current block
    ValueStack*       _state;                      // the current execution state
    Instruction*      _last;                       // the last instruction added
    bool              _skip_block;                 // skip processing of the rest of this block
  
+   // support for optimization of accesses to flattened fields and arrays
+   DelayedFieldAccess* _pending_field_access;
+   DelayedLoadIndexed* _pending_load_indexed;
+ 
    // accessors
    ScopeData*        scope_data() const           { return _scope_data; }
    Compilation*      compilation() const          { return _compilation; }
    BlockList*        bci2block() const            { return scope_data()->bci2block(); }
    ValueMap*         vmap() const                 { assert(UseLocalValueNumbering, "should not access otherwise"); return _vmap; }

*** 206,10 ***
--- 225,16 ---
    ciBytecodeStream* stream() const               { return scope_data()->stream(); }
    Instruction*      last() const                 { return _last; }
    Bytecodes::Code   code() const                 { return stream()->cur_bc(); }
    int               bci() const                  { return stream()->cur_bci(); }
    int               next_bci() const             { return stream()->next_bci(); }
+   bool              has_pending_field_access()   { return _pending_field_access != NULL; }
+   DelayedFieldAccess* pending_field_access()     { return _pending_field_access; }
+   void              set_pending_field_access(DelayedFieldAccess* delayed) { _pending_field_access = delayed; }
+   bool              has_pending_load_indexed()   { return _pending_load_indexed != NULL; }
+   DelayedLoadIndexed* pending_load_indexed()     { return _pending_load_indexed; }
+   void              set_pending_load_indexed(DelayedLoadIndexed* delayed) { _pending_load_indexed = delayed; }
  
    // unified bailout support
    void bailout(const char* msg) const            { compilation()->bailout(msg); }
    bool bailed_out() const                        { return compilation()->bailed_out(); }
  

*** 265,10 ***
--- 290,15 ---
    void monitorexit(Value x, int bci);
    void new_multi_array(int dimensions);
    void throw_op(int bci);
    Value round_fp(Value fp_value);
  
+   // inline types
+   void default_value(int klass_index);
+   void withfield(int field_index);
+   void copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field = NULL);
+ 
    // stack/code manipulation helpers
    Instruction* append_with_bci(Instruction* instr, int bci);
    Instruction* append(Instruction* instr);
    Instruction* append_split(StateSplit* instr);
  

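copy_inline_content() is the helper that copies the payload of a flattened inline value field by field from a source container to a destination, each at its own base offset (in C1 this presumably happens by appending per-field load and store IR nodes rather than raw byte copies). The sketch below is illustrative only: FieldDesc and the layouts are made up, and memcpy stands in for the emitted loads and stores, to show the offset arithmetic involved.

    // Illustrative sketch only -- not HotSpot code. It shows the idea behind a
    // copy_inline_content-style helper: copy every field of a flattened inline
    // value from one container to another, offset by offset. Types and field
    // layouts here are invented for the example.
    #include <cstring>
    #include <cstdio>

    struct FieldDesc { int offset; int size_in_bytes; };

    // Hypothetical flat copy: for each declared field of the inline klass,
    // copy src_off + field.offset -> dest_off + field.offset.
    void copy_inline_content(const FieldDesc* fields, int nof_fields,
                             const char* src, int src_off,
                             char* dest, int dest_off) {
      for (int i = 0; i < nof_fields; i++) {
        std::memcpy(dest + dest_off + fields[i].offset,
                    src  + src_off  + fields[i].offset,
                    fields[i].size_in_bytes);
      }
    }

    int main() {
      // A made-up inline klass with two int fields at offsets 0 and 4.
      FieldDesc point_fields[] = { {0, 4}, {4, 4} };
      char src[16]  = {0};
      char dest[16] = {0};
      int x = 3, y = 7;
      std::memcpy(src + 8 + 0, &x, 4);
      std::memcpy(src + 8 + 4, &y, 4);

      copy_inline_content(point_fields, 2, src, /* src_off */ 8, dest, /* dest_off */ 8);

      int cx, cy;
      std::memcpy(&cx, dest + 8 + 0, 4);
      std::memcpy(&cy, dest + 8 + 4, 4);
      std::printf("copied Point(%d, %d)\n", cx, cy);
      return 0;
    }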
*** 358,10 ***
--- 388,23 ---
    const char* should_not_inline(ciMethod* callee) const;
  
    // JSR 292 support
    bool try_method_handle_inline(ciMethod* callee, bool ignore_return);
  
+   // Inline type support
+   void update_larval_state(Value v) {
+     if (v != NULL && v->as_NewInlineTypeInstance() != NULL) {
+       v->as_NewInlineTypeInstance()->set_not_larva_anymore();
+     }
+   }
+   void update_larva_stack_count(Value v) {
+     if (v != NULL && v->as_NewInlineTypeInstance() != NULL &&
+         v->as_NewInlineTypeInstance()->in_larval_state()) {
+       v->as_NewInlineTypeInstance()->decrement_on_stack_count();
+     }
+   }
+ 
    // helpers
    void inline_bailout(const char* msg);
    BlockBegin* header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state);
    BlockBegin* setup_start_block(int osr_bci, BlockBegin* std_entry, BlockBegin* osr_entry, ValueStack* init_state);
    void setup_osr_entry_block();

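The two helpers above manage the larval lifecycle of a NewInlineTypeInstance: update_larval_state() marks the value as no longer larval, while update_larva_stack_count() decrements its on-stack copy count as long as it is still larval. The stand-alone sketch below mirrors that flag-plus-counter pattern; NewInstance is a made-up stand-in, not the real C1 instruction class.

    // Illustrative sketch only. A minimal stand-in for a larval-state tracker:
    // the instance starts larval, keeps a count of stack copies, and is marked
    // non-larval once the builder decides it is fully set up.
    #include <cstdio>

    class NewInstance {
      bool _larval = true;    // still being built up via withfield-style updates
      int  _on_stack = 0;     // number of copies currently on the expression stack
     public:
      bool in_larval_state() const    { return _larval; }
      void set_not_larva_anymore()    { _larval = false; }
      void push_on_stack()            { _on_stack++; }
      void decrement_on_stack_count() { _on_stack--; }
      int  on_stack_count() const     { return _on_stack; }
    };

    // Mirrors the shape of the GraphBuilder helpers: only touch the value if it
    // is present, and only decrement the stack count while it is still larval.
    void update_larval_state(NewInstance* v)      { if (v != nullptr) v->set_not_larva_anymore(); }
    void update_larva_stack_count(NewInstance* v) {
      if (v != nullptr && v->in_larval_state()) v->decrement_on_stack_count();
    }

    int main() {
      NewInstance p;
      p.push_on_stack();            // value produced and pushed on the stack
      update_larva_stack_count(&p); // consumed while still larval: drop the stack count
      update_larval_state(&p);      // no longer larval from here on
      std::printf("larval=%d on_stack=%d\n", p.in_larval_state(), p.on_stack_count());
      return 0;
    }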
*** 392,10 ***
--- 435,11 ---
    bool profile_inlined_calls() { return _compilation->profile_inlined_calls(); }
    bool profile_checkcasts()    { return _compilation->profile_checkcasts();    }
    bool profile_parameters()    { return _compilation->profile_parameters();    }
    bool profile_arguments()     { return _compilation->profile_arguments();     }
    bool profile_return()        { return _compilation->profile_return();        }
+   bool profile_array_accesses() { return _compilation->profile_array_accesses(); }
  
    Values* args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver);
    Values* collect_args_for_profiling(Values* args, ciMethod* target, bool may_have_receiver);
    void check_args_for_profiling(Values* obj_args, int expected);
  