  RTMLockingCounters*       _rtm_counters; // RTM lock counters for inflated locks
  RTMLockingCounters* _stack_rtm_counters; // RTM lock counters for stack locks

public:
  FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
    init_req(0,ctrl);
    init_class_id(Class_FastLock);
    _rtm_counters = nullptr;
    _stack_rtm_counters = nullptr;
  }
  Node* obj_node() const { return in(1); }
  Node* box_node() const { return in(2); }
  void  set_box_node(Node* box) { set_req(2, box); }

  // FastLock and FastUnlockNode do not hash, we need one for each corresponding
  // LockNode/UnLockNode to avoid creating Phi's.
  virtual uint hash() const ;                  // { return NO_HASH; }
  virtual uint size_of() const;
  virtual bool cmp( const Node &n ) const ;    // Always fail, except on self
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
  const Type *sub( const Type *t1, const Type *t2 ) const { return TypeInt::CC;}

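  // Create the RTM profiling counters for this lock site; these are only
  // allocated when RTM locking statistics/profiling are enabled.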
  void create_rtm_lock_counter(JVMState* state);
  RTMLockingCounters*       rtm_counters() const { return _rtm_counters; }
  RTMLockingCounters* stack_rtm_counters() const { return _stack_rtm_counters; }
};


//------------------------------FastUnlockNode---------------------------------
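// Fast-path counterpart of UnlockNode: a CmpNode whose condition-code result
// indicates whether the inlined fast-path unlock succeeded or the slow-path
// runtime call is required.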
class FastUnlockNode: public CmpNode {
public:
  FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
    init_req(0,ctrl);
    init_class_id(Class_FastUnlock);
  }
  Node* obj_node() const { return in(1); }
  Node* box_node() const { return in(2); }


  // FastLock and FastUnlockNode do not hash, we need one for each corresponding