 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_GRAPHKIT_HPP
#define SHARE_OPTO_GRAPHKIT_HPP

#include "ci/ciEnv.hpp"
#include "ci/ciMethodData.hpp"
#include "gc/shared/c2/barrierSetC2.hpp"
#include "opto/addnode.hpp"
#include "opto/callnode.hpp"
#include "opto/cfgnode.hpp"
#include "opto/compile.hpp"
#include "opto/divnode.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/mulnode.hpp"
#include "opto/phaseX.hpp"
#include "opto/subnode.hpp"
#include "opto/type.hpp"
#include "runtime/deoptimization.hpp"

class BarrierSetC2;
class FastLockNode;
class FastUnlockNode;
class IdealKit;
class LibraryCallKit;
class Parse;
class RootNode;

//-----------------------------------------------------------------------------
//----------------------------GraphKit-----------------------------------------
// Toolkit for building the common sorts of subgraphs.
// Does not know about bytecode parsing or type-flow results.
// It is able to create graphs implementing the semantics of most
// or all bytecodes, so that it can expand intrinsics and calls.
// It may depend on JVMState structure, but it must not depend
// on specific bytecode streams.
class GraphKit : public Phase {
  friend class PreserveJVMState;

 protected:
  ciEnv*         _env;        // Compilation environment
  PhaseGVN&      _gvn;        // Some optimizations while parsing
  SafePointNode* _map;        // Parser map from JVM to Nodes
  SafePointNode* _exceptions; // Parser map(s) for exception state(s)
  int            _bci;        // JVM Bytecode Pointer
  ciMethod*      _method;     // JVM Current Method
  BarrierSetC2*  _barrier_set;
#ifdef ASSERT
  uint           _worklist_size;
#endif

 private:
  int            _sp;         // JVM Expression Stack Pointer; don't modify directly!

 private:
  SafePointNode* map_not_null() const {
    assert(_map != nullptr, "must call stopped() to test for reset compiler map");
    return _map;
  }

 public:
  GraphKit();                                        // empty constructor
  GraphKit(JVMState* jvms, PhaseGVN* gvn = nullptr); // the JVM state on which to operate

#ifdef ASSERT
  ~GraphKit() {
    assert(!has_exceptions(), "user must call transfer_exceptions_into_jvms");
#if 0
    // During incremental inlining, the Node_Array of the C->for_igvn() worklist and the IGVN
    // worklist are shared but the _in_worklist VectorSet is not. To avoid inconsistencies,
    // we should not add nodes to the _for_igvn worklist when using IGVN for the GraphKit.
    assert((_gvn.is_IterGVN() == nullptr) || (_gvn.C->for_igvn()->size() == _worklist_size),
           "GraphKit should not modify _for_igvn worklist after parsing");
#endif
  }
#endif

  virtual Parse*          is_Parse()          const { return nullptr; }
  virtual LibraryCallKit* is_LibraryCallKit() const { return nullptr; }

  ciEnv*    env()               const { return _env; }
  PhaseGVN& gvn()               const { return _gvn; }
  void*     barrier_set_state() const { return C->barrier_set_state(); }

  void record_for_igvn(Node* n) const { _gvn.record_for_igvn(n); }
  void remove_for_igvn(Node* n) const { C->remove_for_igvn(n); }

  // Handy well-known nodes:
  Node*     null() const { return zerocon(T_OBJECT); }
  Node*     top()  const { return C->top(); }
  RootNode* root() const { return C->root(); }

  // Create or find a constant node
  Node* intcon(jint con)   const { return _gvn.intcon(con); }
  Node* longcon(jlong con) const { return _gvn.longcon(con); }
  Node* integercon(jlong con, BasicType bt) const {
    if (bt == T_INT) {
      return intcon(checked_cast<jint>(con));
    }
    assert(bt == T_LONG, "basic type not an int or long");
    return longcon(con);
  }
  Node* makecon(const Type* t) const { return _gvn.makecon(t); }
  Node* zerocon(BasicType bt)  const { return _gvn.zerocon(bt); }
  // (See also macro MakeConX in type.hpp, which uses intcon or longcon.)
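  //
  // A minimal usage sketch for the constant factories above (illustrative,
  // not from the original source; assumes a GraphKit `kit` is in scope):
  //   Node* i = kit.intcon(42);             // shared jint constant node
  //   Node* l = kit.longcon(42L);           // shared jlong constant node
  //   Node* g = kit.integercon(42, T_INT);  // picks intcon or longcon from bt
  //   Node* z = kit.zerocon(T_OBJECT);      // the same node kit.null() returns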
  Node* ConvL2I(Node* offset);
  // Find out the klass of an object.
  Node* load_object_klass(Node* object);
  // Find out the length of an array.
  Node* load_array_length(Node* array);
  // Cast array allocation's length as narrow as possible.
  // If replace_length_in_map is true, replace length with CastIINode in map.
  // This method is invoked after creating/moving an AllocateArrayNode or in load_array_length.
  Node* array_ideal_length(AllocateArrayNode* alloc,
                           const TypeOopPtr* oop_type,
                           bool replace_length_in_map);

  // Helper function to do a null pointer check or ZERO check based on type.
  // Throw an exception if a given value is null.
  // Return the value cast to not-null.
  // Be clever about equivalent dominating null checks.
  Node* null_check_common(Node* value, BasicType type,
                          bool assert_null = false,
                          Node** null_control = nullptr,
                          bool speculative = false,
                          bool is_init_check = false);
  Node* null_check(Node* value, BasicType type = T_OBJECT) {
    return null_check_common(value, type, false, nullptr, !_gvn.type(value)->speculative_maybe_null());
  }
  Node* null_check_receiver() {
    return null_check(argument(0));
  }
  Node* zero_check_int(Node* value) {
    assert(value->bottom_type()->basic_type() == T_INT,
           "wrong type: %s", type2name(value->bottom_type()->basic_type()));
    return null_check_common(value, T_INT);
  }
  Node* zero_check_long(Node* value) {
    assert(value->bottom_type()->basic_type() == T_LONG,
           "wrong type: %s", type2name(value->bottom_type()->basic_type()));
    return null_check_common(value, T_LONG);
  }
  // Throw an uncommon trap if a given value is __not__ null.
  // Return the value cast to null, and be clever about dominating checks.
  Node* null_assert(Node* value, BasicType type = T_OBJECT) {
    return null_check_common(value, type, true, nullptr, _gvn.type(value)->speculative_always_null());
  }

  // Check if value is null and trap if it is; return the value cast to not-null.
  Node* must_be_not_null(Node* value, bool do_replace_in_map);
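
  // Usage sketch for the checks above (illustrative, not from the original
  // source; `kit` is a GraphKit). Each helper returns the value cast to the
  // checked type and wires up the exception path for the failing case:
  //   Node* nn  = kit.null_check(obj);          // NullPointerException path if null
  //   Node* div = kit.zero_check_int(divisor);  // ArithmeticException path if zero
  //   if (kit.stopped()) return;                // path died: the check always fails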
  }
  // This is the base version which is given the alias index.
  // Return the new StoreXNode.
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        int adr_idx,
                        MemNode::MemOrd,
                        bool require_atomic_access = false,
                        bool unaligned = false,
                        bool mismatched = false,
                        bool unsafe = false,
                        int barrier_data = 0);

  // Perform decorated accesses

  Node* access_store_at(Node* obj,  // containing obj
                        Node* adr,  // actual address to store val at
                        const TypePtr* adr_type,
                        Node* val,
                        const Type* val_type,
                        BasicType bt,
                        DecoratorSet decorators,
                        bool safe_for_replace = true);

  Node* access_load_at(Node* obj,   // containing obj
                       Node* adr,   // actual address to load val at
                       const TypePtr* adr_type,
                       const Type* val_type,
                       BasicType bt,
                       DecoratorSet decorators,
                       Node* ctl = nullptr);

  Node* access_load(Node* adr,      // actual address to load val at
                    const Type* val_type,
                    BasicType bt,
                    DecoratorSet decorators);
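
  // Sketch of a decorated in-heap store/load pair (illustrative only; the
  // chosen decorator set is an assumption, and barrier expansion is delegated
  // to the BarrierSetC2 of the active GC):
  //   kit.access_store_at(obj, adr, adr_type, val, val_type, T_OBJECT,
  //                       IN_HEAP | MO_UNORDERED);
  //   Node* v = kit.access_load_at(obj, adr, adr_type, val_type, T_OBJECT,
  //                                IN_HEAP | MO_UNORDERED);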

  Node* access_atomic_cmpxchg_val_at(Node* obj,
                                     Node* adr,
                                     const TypePtr* adr_type,
                                     int alias_idx,
                                     Node* expected_val,
                                     Node* new_val,
                                     const Type* value_type,
                                     BasicType bt,
                                     DecoratorSet decorators);

  Node* access_atomic_cmpxchg_bool_at(Node* obj,
                                      Node* adr,
                                      const TypePtr* adr_type,
                                      int alias_idx,
  void make_dtrace_method_entry_exit(ciMethod* method, bool is_entry);
  void make_dtrace_method_entry(ciMethod* method) {
    make_dtrace_method_entry_exit(method, true);
  }
  void make_dtrace_method_exit(ciMethod* method) {
    make_dtrace_method_entry_exit(method, false);
  }

  //--------------- stub generation -------------------
 public:
  void gen_stub(address C_function,
                const char* name,
                int is_fancy_jump,
                bool pass_tls,
                bool return_pc);

  //---------- help for generating calls --------------

  // Do a null check on the receiver as it would happen before the call to
  // callee (with all arguments still on the stack).
  Node* null_check_receiver_before_call(ciMethod* callee, bool replace_value = true) {
    assert(!callee->is_static(), "must be a virtual method");
    // The callsite signature can differ from the actual method being called (i.e. _linkTo* sites).
    // Always use the callsite signature.
    ciMethod* declared_method = method()->get_method_at_bci(bci());
    const int nargs = declared_method->arg_size();
    inc_sp(nargs);
    Node* n = null_check_receiver();
    dec_sp(nargs);
    return n;
  }
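
  // Illustrative call-site pattern (an assumption, not from the original source):
  //   Node* recv = kit.null_check_receiver_before_call(callee);
  //   if (kit.stopped()) return;  // receiver provably null; path ends in a trap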

  // Fill in argument edges for the call from argument(0), argument(1), ...
  // (The next step is to call set_edges_for_java_call.)
  void set_arguments_for_java_call(CallJavaNode* call, bool is_late_inline = false);

  // Fill in non-argument edges for the call.
  // Transform the call, and update the basics: control, i_o, memory.
  // (The next step is usually to call set_results_for_java_call.)
  void set_edges_for_java_call(CallJavaNode* call,
                               bool must_throw = false, bool separate_io_proj = false);

  // Finish up a java call that was started by set_edges_for_java_call.
  // Call add_exception on any throw arising from the call.
  // Return the call result (transformed).
  Node* set_results_for_java_call(CallJavaNode* call, bool separate_io_proj = false, bool deoptimize = false);
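
  // Putting the three steps together (sketch; assumes `call` is a freshly
  // allocated CallJavaNode for the target method):
  //   kit.set_arguments_for_java_call(call);            // argument edges
  //   kit.set_edges_for_java_call(call);                // control/i_o/memory edges
  //   Node* ret = kit.set_results_for_java_call(call);  // transformed result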

  // Similar to set_edges_for_java_call, but simplified for runtime calls.
  void set_predefined_output_for_runtime_call(Node* call) {
    set_predefined_output_for_runtime_call(call, nullptr, nullptr);
  }
  void set_predefined_output_for_runtime_call(Node* call,
                                              Node* keep_mem,
                                              const TypePtr* hook_mem);
  Node* set_predefined_input_for_runtime_call(SafePointNode* call, Node* narrow_mem = nullptr);
  void merge_memory(Node* new_mem, Node* region, int new_path);
  void make_slow_call_ex(Node* call, ciInstanceKlass* ex_klass, bool separate_io_proj, bool deoptimize = false);

  // Helper functions to build synchronizations
  int next_monitor();
  Node* insert_mem_bar(int opcode, Node* precedent = nullptr);
  Node* insert_mem_bar_volatile(int opcode, int alias_idx, Node* precedent = nullptr);
  // Optional 'precedent' is appended as an extra edge, to force ordering.
  FastLockNode* shared_lock(Node* obj);
  void shared_unlock(Node* box, Node* obj);

  // Helper functions for the fast path/slow path idioms
  Node* fast_and_slow(Node* in, const Type* result_type, Node* null_result, IfNode* fast_test, Node* fast_result, address slow_call, const TypeFunc* slow_call_type, Node* slow_arg, Klass* ex_klass, Node* slow_result);

  // Generate an instance-of idiom. Used by both the instance-of bytecode
  // and the reflective instance-of call.
  Node* gen_instanceof(Node* subobj, Node* superkls, bool safe_for_replace = false);

  // Generate a check-cast idiom. Used by both the check-cast bytecode
  // and the array-store bytecode.
  Node* gen_checkcast(Node* subobj, Node* superkls, Node** failure_control = nullptr, bool null_free = false);
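
  // Illustrative use of the two idioms above (not from the original source):
  //   Node* is_inst = kit.gen_instanceof(obj, superklass_node); // int 0/1 result
  //   Node* casted  = kit.gen_checkcast(obj, superklass_node);  // throws CCE on the failure path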

  // Inline types
  Node* inline_type_test(Node* obj, bool is_inline = true);
  Node* is_val_mirror(Node* mirror);
  Node* array_lh_test(Node* kls, jint mask, jint val, bool eq = true);
  Node* flat_array_test(Node* array_or_klass, bool flat = true);
  Node* null_free_array_test(Node* klass, bool null_free = true);
  Node* inline_array_null_guard(Node* ary, Node* val, int nargs, bool safe_for_replace = false);

  Node* gen_subtype_check(Node* obj, Node* superklass);

  // Exact type check used for predicted calls and casts.
  // Rewrites (*casted_receiver) to be casted to the stronger type.
  // (Caller is responsible for doing replace_in_map.)
  Node* type_check_receiver(Node* receiver, ciKlass* klass, float prob,
                            Node** casted_receiver);
  Node* type_check(Node* recv_klass, const TypeKlassPtr* tklass, float prob);

  // Inexact type check used for predicted calls.
  Node* subtype_check_receiver(Node* receiver, ciKlass* klass,
                               Node** casted_receiver);

  // Implementation of object creation
  Node* set_output_for_allocation(AllocateNode* alloc,
                                  const TypeOopPtr* oop_type,
                                  bool deoptimize_on_exception = false);
  Node* get_layout_helper(Node* klass_node, jint& constant_value);
  Node* new_instance(Node* klass_node,
                     Node* slow_test = nullptr,
                     Node** return_size_val = nullptr,
                     bool deoptimize_on_exception = false,
                     InlineTypeNode* inline_type_node = nullptr);
  Node* new_array(Node* klass_node, Node* count_val, int nargs,
                  Node** return_size_val = nullptr,
                  bool deoptimize_on_exception = false);

  // java.lang.String helpers
  Node* load_String_length(Node* str, bool set_ctrl);
  Node* load_String_value(Node* str, bool set_ctrl);
  Node* load_String_coder(Node* str, bool set_ctrl);
  void  store_String_value(Node* str, Node* value);
  void  store_String_coder(Node* str, Node* value);
  Node* capture_memory(const TypePtr* src_type, const TypePtr* dst_type);
  Node* compress_string(Node* src, const TypeAryPtr* src_type, Node* dst, Node* count);
  void  inflate_string(Node* src, Node* dst, const TypeAryPtr* dst_type, Node* count);
  void  inflate_string_slow(Node* src, Node* dst, Node* start, Node* count);

  // Handy for making control flow
  IfNode* create_and_map_if(Node* ctrl, Node* tst, float prob, float cnt) {
    IfNode* iff = new IfNode(ctrl, tst, prob, cnt); // New IfNode
    _gvn.set_type(iff, iff->Value(&_gvn));          // Value may be known at parse-time
    // Place 'if' on worklist if it will be in graph
    if (!tst->is_Con()) record_for_igvn(iff);       // Range-check and null-check removal happen later
    return iff;
  }

  IfNode* create_and_xform_if(Node* ctrl, Node* tst, float prob, float cnt) {
    IfNode* iff = new IfNode(ctrl, tst, prob, cnt); // New IfNode
    _gvn.transform(iff);                            // Value may be known at parse-time
    // Place 'if' on worklist if it will be in graph
    if (!tst->is_Con()) record_for_igvn(iff);       // Range-check and null-check removal happen later
    return iff;
  }
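
  // Typical pattern around these helpers (sketch; BoolNode and the If
  // projections are standard C2 nodes, the surrounding context is assumed):
  //   Node* tst = _gvn.transform(new BoolNode(cmp, BoolTest::ne));
  //   IfNode* iff = create_and_map_if(control(), tst, PROB_FAIR, COUNT_UNKNOWN);
  //   Node* if_true  = _gvn.transform(new IfTrueNode(iff));  // taken branch
  //   Node* if_false = _gvn.transform(new IfFalseNode(iff)); // not-taken branch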

  void add_parse_predicates(int nargs = 0);
  void add_parse_predicate(Deoptimization::DeoptReason reason, int nargs);

  Node* make_constant_from_field(ciField* field, Node* obj);
  Node* load_mirror_from_klass(Node* klass);

  // Vector API support (implemented in vectorIntrinsics.cpp)
  Node* box_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool deoptimize_on_exception = false);
  Node* unbox_vector(Node* in, const TypeInstPtr* vbox_type, BasicType elem_bt, int num_elem, bool shuffle_to_vector = false);
  Node* vector_shift_count(Node* cnt, int shift_op, BasicType bt, int num_elem);
};

// Helper class to support building of control flow branches. Upon
// creation, the map and sp at bci are cloned and restored upon
// destruction. Typical use:
//
// { PreserveJVMState pjvms(this);
//   // code of new branch
// }
// // here the JVM state at bci is established

class PreserveJVMState: public StackObj {
 protected:
  GraphKit*      _kit;
#ifdef ASSERT