123 return _gvn.find_long_con(n, value_if_unknown);
124 }
125 // (See also macro find_intptr_t_con in type.hpp, which uses one of these.)
126
127 // JVM State accessors:
128 // Parser mapping from JVM indices into Nodes.
129 // Low slots are accessed by the StartNode::enum.
130 // Then come the locals at StartNode::Parms to StartNode::Parms+max_locals();
131 // Then come JVM stack slots.
132 // Finally come the monitors, if any.
133 // See layout accessors in class JVMState.
134
  // Current JVM-state map node; may be null (no null check here).
  SafePointNode* map() const { return _map; }
  // True iff at least one exception state is pending.
  bool has_exceptions() const { return _exceptions != nullptr; }
  // JVMState hanging off the (non-null) map.
  JVMState* jvms() const { return map_not_null()->_jvms; }
  int sp() const { return _sp; }     // cached stack pointer (see sync_jvms for pushing it back)
  int bci() const { return _bci; }   // cached bytecode index
  Bytecodes::Code java_bc() const;
  ciMethod* method() const { return _method; }  // cached method, may be null (see set_jvms)
142
143 void set_jvms(JVMState* jvms) { set_map(jvms->map());
144 assert(jvms == this->jvms(), "sanity");
145 _sp = jvms->sp();
146 _bci = jvms->bci();
147 _method = jvms->has_method() ? jvms->method() : nullptr; }
  // Install a new map; verified in debug builds only.
  void set_map(SafePointNode* m) { _map = m; debug_only(verify_map()); }
  void set_sp(int sp) { assert(sp >= 0, "sp must be non-negative: %d", sp); _sp = sp; }
  void clean_stack(int from_sp); // clear garbage beyond from_sp to top

  // Adjust the cached stack pointer; set_sp asserts it stays non-negative.
  void inc_sp(int i) { set_sp(sp() + i); }
  void dec_sp(int i) { set_sp(sp() - i); }
  // Updates only the cached bci, not the JVMState (see sync_jvms).
  void set_bci(int bci) { _bci = bci; }
155
  // Make sure jvms has current bci & sp.
  JVMState* sync_jvms() const;
  // Variant for bytecodes that must be re-executed after deopt — TODO confirm exact semantics.
  JVMState* sync_jvms_for_reexecute();
159
160 #ifdef ASSERT
161 // Make sure JVMS has an updated copy of bci and sp.
162 // Also sanity-check method, depth, and monitor depth.
814 RC_NO_FP = 1, // CallLeafNoFPNode
815 RC_NO_IO = 2, // do not hook IO edges
816 RC_NO_LEAF = 4, // CallStaticJavaNode
817 RC_MUST_THROW = 8, // flag passed to add_safepoint_edges
818 RC_NARROW_MEM = 16, // input memory is same as output
819 RC_UNCOMMON = 32, // freq. expected to be like uncommon trap
820 RC_VECTOR = 64, // CallLeafVectorNode
821 RC_LEAF = 0 // null value: no flags set
822 };
823
  // merge in all memory slices from new_mem, along the given path
  void merge_memory(Node* new_mem, Node* region, int new_path);
  // Presumably wires exception/deopt paths for a slow call; ex_klass is the
  // expected exception class — TODO confirm against graphKit.cpp.
  void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj, bool deoptimize = false);

  // Helper functions to build synchronizations
  int next_monitor();
  Node* insert_mem_bar(int opcode, Node* precedent = nullptr);
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = nullptr);
  // Optional 'precedent' is appended as an extra edge, to force ordering.
  FastLockNode* shared_lock(Node* obj);
  void shared_unlock(Node* box, Node* obj);

  // helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);

  // Generate an instance-of idiom. Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof(Node *subobj, Node* superkls, bool safe_for_replace = false);

  // Generate a check-cast idiom. Used by both the check-cast bytecode
  // and the array-store bytecode
  Node* gen_checkcast( Node *subobj, Node* superkls,
                       Node* *failure_control = nullptr );

  Node* gen_subtype_check(Node* obj, Node* superklass);

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be casted to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node* *casted_receiver);

  // Inexact type check used for predicted calls.
  Node* subtype_check_receiver(Node* receiver, ciKlass* klass,
                               Node** casted_receiver);

  // implementation of object creation
  Node* set_output_for_allocation(AllocateNode* alloc,
                                  const TypeOopPtr* oop_type,
                                  bool deoptimize_on_exception=false);
  // On return, constant_value holds the layout helper if it is a compile-time
  // constant — TODO confirm sentinel used when unknown.
  Node* get_layout_helper(Node* klass_node, jint& constant_value);
  Node* new_instance(Node* klass_node,
                     Node* slow_test = nullptr,
                     Node* *return_size_val = nullptr,
                     bool deoptimize_on_exception = false);
  // 'nargs' presumably counts stack arguments to restore for deopt — confirm.
  Node* new_array(Node* klass_node, Node* count_val, int nargs,
                  Node* *return_size_val = nullptr,
                  bool deoptimize_on_exception = false);

  // java.lang.String helpers
  Node* load_String_length(Node* str, bool set_ctrl);
  Node* load_String_value(Node* str, bool set_ctrl);
  Node* load_String_coder(Node* str, bool set_ctrl);
  void store_String_value(Node* str, Node* value);
  void store_String_coder(Node* str, Node* value);
  Node* capture_memory(const TypePtr* src_type, const TypePtr* dst_type);
  Node* compress_string(Node* src, const TypeAryPtr* src_type, Node* dst, Node* count);
  void inflate_string(Node* src, Node* dst, const TypeAryPtr* dst_type, Node* count);
  void inflate_string_slow(Node* src, Node* dst, Node* start, Node* count);
883
890 return iff;
891 }
892
893 IfNode* create_and_xform_if(Node* ctrl, Node* tst, float prob, float cnt) {
894 IfNode* iff = new IfNode(ctrl, tst, prob, cnt);// New IfNode's
895 _gvn.transform(iff); // Value may be known at parse-time
896 // Place 'if' on worklist if it will be in graph
897 if (!tst->is_Con()) record_for_igvn(iff); // Range-check and Null-check removal is later
898 return iff;
899 }
900
  // Insert parse-time predicates (one per deopt reason via add_parse_predicate).
  void add_parse_predicates(int nargs = 0);
  void add_parse_predicate(Deoptimization::DeoptReason reason, int nargs);

  Node* make_constant_from_field(ciField* field, Node* obj);

  // Vector API support (implemented in vectorIntrinsics.cpp)
  Node* box_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool deoptimize_on_exception = false);
  Node* unbox_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool shuffle_to_vector = false);
  Node* vector_shift_count(Node* cnt, int shift_op, BasicType bt, int num_elem);
910 };
911
912 // Helper class to support building of control flow branches. Upon
913 // creation the map and sp at bci are cloned and restored upon de-
914 // struction. Typical use:
915 //
916 // { PreserveJVMState pjvms(this);
917 // // code of new branch
918 // }
919 // // here the JVM state at bci is established
920
921 class PreserveJVMState: public StackObj {
922 protected:
923 GraphKit* _kit;
924 #ifdef ASSERT
925 int _block; // PO of current block, if a Parse
926 int _bci;
927 #endif
928 SafePointNode* _map;
929 uint _sp;
|
123 return _gvn.find_long_con(n, value_if_unknown);
124 }
125 // (See also macro find_intptr_t_con in type.hpp, which uses one of these.)
126
127 // JVM State accessors:
128 // Parser mapping from JVM indices into Nodes.
129 // Low slots are accessed by the StartNode::enum.
130 // Then come the locals at StartNode::Parms to StartNode::Parms+max_locals();
131 // Then come JVM stack slots.
132 // Finally come the monitors, if any.
133 // See layout accessors in class JVMState.
134
  // Current JVM-state map node; may be null (no null check here).
  SafePointNode* map() const { return _map; }
  // True iff at least one exception state is pending.
  bool has_exceptions() const { return _exceptions != nullptr; }
  // JVMState hanging off the (non-null) map.
  JVMState* jvms() const { return map_not_null()->_jvms; }
  int sp() const { return _sp; }     // cached stack pointer (see sync_jvms for pushing it back)
  int bci() const { return _bci; }   // cached bytecode index
  Bytecodes::Code java_bc() const;
  ciMethod* method() const { return _method; }  // cached method, may be null (see set_jvms)

  // Compile-wide partial-escape-analysis state, delegated to the compilation object C.
  PartialEscapeAnalysis* PEA() const { return C->PEA(); }
144 void set_jvms(JVMState* jvms) { set_map(jvms->map());
145 assert(jvms == this->jvms(), "sanity");
146 _sp = jvms->sp();
147 _bci = jvms->bci();
148 _method = jvms->has_method() ? jvms->method() : nullptr; }
  // Install a new map; verified in debug builds only.
  void set_map(SafePointNode* m) { _map = m; debug_only(verify_map()); }
  void set_sp(int sp) { assert(sp >= 0, "sp must be non-negative: %d", sp); _sp = sp; }
  void clean_stack(int from_sp); // clear garbage beyond from_sp to top

  // Adjust the cached stack pointer; set_sp asserts it stays non-negative.
  void inc_sp(int i) { set_sp(sp() + i); }
  void dec_sp(int i) { set_sp(sp() - i); }
  // Updates only the cached bci, not the JVMState (see sync_jvms).
  void set_bci(int bci) { _bci = bci; }
156
  // Make sure jvms has current bci & sp.
  JVMState* sync_jvms() const;
  // Variant for bytecodes that must be re-executed after deopt — TODO confirm exact semantics.
  JVMState* sync_jvms_for_reexecute();
160
161 #ifdef ASSERT
162 // Make sure JVMS has an updated copy of bci and sp.
163 // Also sanity-check method, depth, and monitor depth.
815 RC_NO_FP = 1, // CallLeafNoFPNode
816 RC_NO_IO = 2, // do not hook IO edges
817 RC_NO_LEAF = 4, // CallStaticJavaNode
818 RC_MUST_THROW = 8, // flag passed to add_safepoint_edges
819 RC_NARROW_MEM = 16, // input memory is same as output
820 RC_UNCOMMON = 32, // freq. expected to be like uncommon trap
821 RC_VECTOR = 64, // CallLeafVectorNode
822 RC_LEAF = 0 // null value: no flags set
823 };
824
  // merge in all memory slices from new_mem, along the given path
  void merge_memory(Node* new_mem, Node* region, int new_path);
  // Presumably wires exception/deopt paths for a slow call; ex_klass is the
  // expected exception class — TODO confirm against graphKit.cpp.
  void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj, bool deoptimize = false);

  // Helper functions to build synchronizations
  int next_monitor();
  Node* insert_mem_bar(int opcode, Node* precedent = nullptr);
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = nullptr);
  // Optional 'precedent' is appended as an extra edge, to force ordering.
  FastLockNode* shared_lock(Node* obj);
  // preserve_monitor: when true, presumably keeps the monitor in the JVM state
  // (added alongside the PEA support) — TODO confirm.
  void shared_unlock(Node* box, Node* obj, bool preserve_monitor = false);
  void clone_shared_lock(Node* box, Node* obj);

  // helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type *result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc *slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);

  // Generate an instance-of idiom. Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof(Node *subobj, Node* superkls, bool safe_for_replace = false);

  // Generate a check-cast idiom. Used by both the check-cast bytecode
  // and the array-store bytecode
  Node* gen_checkcast( Node *subobj, Node* superkls,
                       Node* *failure_control = nullptr );

  Node* gen_subtype_check(Node* obj, Node* superklass);

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be casted to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node* *casted_receiver);

  // Inexact type check used for predicted calls.
  Node* subtype_check_receiver(Node* receiver, ciKlass* klass,
                               Node** casted_receiver);

  // Shared tail of allocation wiring (no default arg; see set_output_for_allocation).
  Node* set_output_for_allocation_common(AllocateNode* alloc,
                                         const TypeOopPtr* oop_type,
                                         bool deoptimize_on_exception);

  // implementation of object creation
  Node* set_output_for_allocation(AllocateNode* alloc,
                                  const TypeOopPtr* oop_type,
                                  bool deoptimize_on_exception=false);

  // Presumably materializes a PEA-virtualized object at this point — TODO confirm.
  Node* materialize_object(AllocateNode* alloc, const TypeOopPtr* oop_type);

  // On return, constant_value holds the layout helper if it is a compile-time
  // constant — TODO confirm sentinel used when unknown.
  Node* get_layout_helper(Node* klass_node, jint& constant_value);
  Node* new_instance(Node* klass_node,
                     Node* slow_test = nullptr,
                     Node* *return_size_val = nullptr,
                     bool deoptimize_on_exception = false);
  // 'nargs' presumably counts stack arguments to restore for deopt — confirm.
  Node* new_array(Node* klass_node, Node* count_val, int nargs,
                  Node* *return_size_val = nullptr,
                  bool deoptimize_on_exception = false);

  // java.lang.String helpers
  Node* load_String_length(Node* str, bool set_ctrl);
  Node* load_String_value(Node* str, bool set_ctrl);
  Node* load_String_coder(Node* str, bool set_ctrl);
  void store_String_value(Node* str, Node* value);
  void store_String_coder(Node* str, Node* value);
  Node* capture_memory(const TypePtr* src_type, const TypePtr* dst_type);
  Node* compress_string(Node* src, const TypeAryPtr* src_type, Node* dst, Node* count);
  void inflate_string(Node* src, Node* dst, const TypeAryPtr* dst_type, Node* count);
  void inflate_string_slow(Node* src, Node* dst, Node* start, Node* count);
892
899 return iff;
900 }
901
902 IfNode* create_and_xform_if(Node* ctrl, Node* tst, float prob, float cnt) {
903 IfNode* iff = new IfNode(ctrl, tst, prob, cnt);// New IfNode's
904 _gvn.transform(iff); // Value may be known at parse-time
905 // Place 'if' on worklist if it will be in graph
906 if (!tst->is_Con()) record_for_igvn(iff); // Range-check and Null-check removal is later
907 return iff;
908 }
909
  // Insert parse-time predicates (one per deopt reason via add_parse_predicate).
  void add_parse_predicates(int nargs = 0);
  void add_parse_predicate(Deoptimization::DeoptReason reason, int nargs);

  Node* make_constant_from_field(ciField* field, Node* obj);

  // Vector API support (implemented in vectorIntrinsics.cpp)
  Node* box_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool deoptimize_on_exception = false);
  Node* unbox_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool shuffle_to_vector = false);
  Node* vector_shift_count(Node* cnt, int shift_op, BasicType bt, int num_elem);
  // Presumably rewrites map inputs [begin, end) with PEA-materialized values — TODO confirm.
  void backfill_materialized(SafePointNode* map, uint begin, uint end, PEAState& as);
920 };
921
922 // Helper class to support building of control flow branches. Upon
923 // creation the map and sp at bci are cloned and restored upon de-
924 // struction. Typical use:
925 //
926 // { PreserveJVMState pjvms(this);
927 // // code of new branch
928 // }
929 // // here the JVM state at bci is established
930
931 class PreserveJVMState: public StackObj {
932 protected:
933 GraphKit* _kit;
934 #ifdef ASSERT
935 int _block; // PO of current block, if a Parse
936 int _bci;
937 #endif
938 SafePointNode* _map;
939 uint _sp;
|