< prev index next > src/hotspot/share/opto/parse.hpp
Print this page
ciTypeFlow* _flow; // Results of previous flow pass.
Block* _blocks; // Array of basic-block structs.
int _block_count; // Number of elements in _blocks.
GraphKit _exits; // Record all normal returns and throws here.
! bool _wrote_final; // Did we write a final field?
bool _wrote_volatile; // Did we write a volatile field?
bool _wrote_stable; // Did we write a @Stable field?
bool _wrote_fields; // Did we write any field?
Node* _alloc_with_final_or_stable; // An allocation node with final or @Stable field
Node* _stress_rf_hook; // StressReachabilityFences support
ciTypeFlow* _flow; // Results of previous flow pass.
Block* _blocks; // Array of basic-block structs.
int _block_count; // Number of elements in _blocks.
GraphKit _exits; // Record all normal returns and throws here.
! bool _wrote_non_strict_final; // Did we write a non-strict final field?
bool _wrote_volatile; // Did we write a volatile field?
bool _wrote_stable; // Did we write a @Stable field?
bool _wrote_fields; // Did we write any field?
Node* _alloc_with_final_or_stable; // An allocation node with final or @Stable field
Node* _stress_rf_hook; // StressReachabilityFences support
ciTypeFlow* flow() const { return _flow; }
// blocks() -- see rpo_at, start_block, etc.
int block_count() const { return _block_count; }
GraphKit& exits() { return _exits; }
! bool wrote_final() const { return _wrote_final; }
! void set_wrote_final(bool z) { _wrote_final = z; }
bool wrote_volatile() const { return _wrote_volatile; }
void set_wrote_volatile(bool z) { _wrote_volatile = z; }
bool wrote_stable() const { return _wrote_stable; }
void set_wrote_stable(bool z) { _wrote_stable = z; }
bool wrote_fields() const { return _wrote_fields; }
ciTypeFlow* flow() const { return _flow; }
// blocks() -- see rpo_at, start_block, etc.
int block_count() const { return _block_count; }
GraphKit& exits() { return _exits; }
! bool wrote_non_strict_final() const { return _wrote_non_strict_final; }
! void set_wrote_non_strict_final(bool z) { _wrote_non_strict_final = z; }
bool wrote_volatile() const { return _wrote_volatile; }
void set_wrote_volatile(bool z) { _wrote_volatile = z; }
bool wrote_stable() const { return _wrote_stable; }
void set_wrote_stable(bool z) { _wrote_stable = z; }
bool wrote_fields() const { return _wrote_fields; }
private:
// Create a JVMS & map for the initial state of this method.
SafePointNode* create_entry_map();
// OSR helpers
! Node *fetch_interpreter_state(int index, BasicType bt, Node* local_addrs);
! Node* check_interpreter_type(Node* l, const Type* type, SafePointNode* &bad_type_exit);
void load_interpreter_state(Node* osr_buf);
// Functions for managing basic blocks:
void init_blocks();
void load_state_from(Block* b);
private:
// Create a JVMS & map for the initial state of this method.
SafePointNode* create_entry_map();
// OSR helpers
! Node* fetch_interpreter_state(int index, const Type* type, Node* local_addrs);
! Node* check_interpreter_type(Node* l, ciType* ci_type, SafePointNode* &bad_type_exit);
void load_interpreter_state(Node* osr_buf);
// Functions for managing basic blocks:
void init_blocks();
void load_state_from(Block* b);
// Push the exception oop and merge the current mapping into an exception handler.
void push_and_merge_exception(int target_bci, Node* ex_oop);
// Helper: Merge the current mapping into the given basic block
void merge_common(Block* target, int pnum);
// Helper functions for merging individual cells.
! PhiNode *ensure_phi( int idx, bool nocreate = false);
! PhiNode *ensure_memory_phi(int idx, bool nocreate = false);
// Helper to merge the current memory state into the given basic block
void merge_memory_edges(MergeMemNode* n, int pnum, bool nophi);
// Parse this bytecode, and alter the Parser's JVM->Node mapping
void do_one_bytecode();
// helper function to generate array store check
! void array_store_check();
// Helper function to generate array load
void array_load(BasicType etype);
// Helper function to generate array store
void array_store(BasicType etype);
// Helper function to compute array addressing
Node* array_addressing(BasicType type, int vals, const Type*& elemtype);
void clinit_deopt();
// Pass current map to exits
void return_current(Node* value);
// Push the exception oop and merge the current mapping into an exception handler.
void push_and_merge_exception(int target_bci, Node* ex_oop);
// Helper: Merge the current mapping into the given basic block
void merge_common(Block* target, int pnum);
// Helper functions for merging individual cells.
! Node* ensure_phi( int idx, bool nocreate = false);
! PhiNode* ensure_memory_phi(int idx, bool nocreate = false);
// Helper to merge the current memory state into the given basic block
void merge_memory_edges(MergeMemNode* n, int pnum, bool nophi);
// Parse this bytecode, and alter the Parser's JVM->Node mapping
void do_one_bytecode();
// helper function to generate array store check
! Node* array_store_check(Node*& adr, const Type*& elemtype);
// Helper function to generate array load
void array_load(BasicType etype);
+ Node* load_from_unknown_flat_array(Node* array, Node* array_index, const TypeOopPtr* element_ptr);
// Helper function to generate array store
void array_store(BasicType etype);
+ void store_to_unknown_flat_array(Node* array, Node* idx, Node* non_null_stored_value);
// Helper function to compute array addressing
Node* array_addressing(BasicType type, int vals, const Type*& elemtype);
+ bool needs_range_check(const TypeInt* size_type, const Node* index) const;
+ Node* create_speculative_inline_type_array_checks(Node* array, const TypeAryPtr* array_type, const Type*& element_type);
+ Node* cast_to_speculative_array_type(Node* array, const TypeAryPtr*& array_type, const Type*& element_type);
+ Node* cast_to_profiled_array_type(Node* const array);
+ Node* speculate_non_null_free_array(Node* array, const TypeAryPtr*& array_type);
+ Node* speculate_non_flat_array(Node* array, const TypeAryPtr* array_type);
+ void create_range_check(Node* idx, Node* ary, const TypeInt* sizetype);
+ Node* record_profile_for_speculation_at_array_load(Node* ld);
void clinit_deopt();
// Pass current map to exits
void return_current(Node* value);
// common code for making initial checks and forming addresses
void do_field_access(bool is_get, bool is_field);
// common code for actually performing the load or store
! void do_get_xxx(Node* obj, ciField* field, bool is_field);
void do_put_xxx(Node* obj, ciField* field, bool is_field);
// implementation of object creation bytecodes
void do_new();
void do_newarray(BasicType elemtype);
! void do_anewarray();
void do_multianewarray();
Node* expand_multianewarray(ciArrayKlass* array_klass, Node* *lengths, int ndimensions, int nargs);
// implementation of jsr/ret
void do_jsr();
// common code for making initial checks and forming addresses
void do_field_access(bool is_get, bool is_field);
// common code for actually performing the load or store
! void do_get_xxx(Node* obj, ciField* field);
void do_put_xxx(Node* obj, ciField* field, bool is_field);
+ ciType* improve_abstract_inline_type_klass(ciType* field_klass);
+
// implementation of object creation bytecodes
void do_new();
void do_newarray(BasicType elemtype);
! void do_newarray();
void do_multianewarray();
Node* expand_multianewarray(ciArrayKlass* array_klass, Node* *lengths, int ndimensions, int nargs);
// implementation of jsr/ret
void do_jsr();
float branch_prediction(float &cnt, BoolTest::mask btest, int target_bci, Node* test);
bool seems_never_taken(float prob) const;
bool path_is_suitable_for_uncommon_trap(float prob) const;
void do_ifnull(BoolTest::mask btest, Node* c);
! void do_if(BoolTest::mask btest, Node* c);
int repush_if_args();
! void adjust_map_after_if(BoolTest::mask btest, Node* c, float prob, Block* path);
void sharpen_type_after_if(BoolTest::mask btest,
Node* con, const Type* tcon,
Node* val, const Type* tval);
void maybe_add_predicate_after_if(Block* path);
IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt);
float branch_prediction(float &cnt, BoolTest::mask btest, int target_bci, Node* test);
bool seems_never_taken(float prob) const;
bool path_is_suitable_for_uncommon_trap(float prob) const;
void do_ifnull(BoolTest::mask btest, Node* c);
! void do_if(BoolTest::mask btest, Node* c, bool can_trap = true, bool new_path = false, Node** ctrl_taken = nullptr, Node** stress_count_mem = nullptr);
+ void do_acmp(BoolTest::mask btest, Node* left, Node* right);
+ void acmp_always_null_input(Node* input, const TypeOopPtr* tinput, BoolTest::mask btest, Node* eq_region);
+ void acmp_type_check_or_trap(Node** non_null_input, ciKlass* input_type, Deoptimization::DeoptReason);
+ void acmp_type_check(Node* input, const TypeOopPtr* tinput, ProfilePtrKind input_ptr, ciKlass* input_type, BoolTest::mask btest, Node* eq_region);
+ Node* acmp_null_check(Node* input, const TypeOopPtr* tinput, ProfilePtrKind input_ptr, Node*& null_ctl);
+ public:
+ static IfNode* acmp_fast_path_if_from_substitutable_call(PhaseGVN* phase, CallStaticJavaNode* call);
+ private:
int repush_if_args();
! void adjust_map_after_if(BoolTest::mask btest, Node* c, float prob, Block* path, bool can_trap = true);
void sharpen_type_after_if(BoolTest::mask btest,
Node* con, const Type* tcon,
Node* val, const Type* tval);
void maybe_add_predicate_after_if(Block* path);
IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask, float prob, float cnt);
< prev index next >