    // unlocks reference only this one object.
  }
#endif
  if (unique_lock != nullptr && has_one_lock) {
    *unique_lock = lock;
  }
  return true;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
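// Returning NO_HASH keeps these nodes out of the GVN hash table, so distinct
// locking sites are never commoned with one another.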
uint FastLockNode::hash() const { return NO_HASH; }

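// size_of() reports the derived class size so that Node::clone() copies the
// whole FastLockNode, not just its Node base.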
uint FastLockNode::size_of() const { return sizeof(*this); }

//------------------------------cmp--------------------------------------------
bool FastLockNode::cmp( const Node &n ) const {
  return (&n == this); // Always fail except on self
}

//------------------------------Value------------------------------------------
const Type* FastLockNode::Value(PhaseGVN* phase) const {
  const Type* in1_t = phase->type(in(1));
  if (in1_t == Type::TOP) {
    return Type::TOP;
  }
  if (in1_t->is_inlinetypeptr()) {
    // Locking on an inline type always fails: fold to a constant condition
    // code so that the slow path is always taken.
    return TypeInt::CC_GT;
  }
  return TypeInt::CC;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastUnlockNode::hash() const { return NO_HASH; }

//------------------------------cmp--------------------------------------------
bool FastUnlockNode::cmp( const Node &n ) const {
  return (&n == this); // Always fail except on self
}

//=============================================================================
//------------------------------do_monitor_enter-------------------------------
void Parse::do_monitor_enter() {
  kill_dead_locals();

  // Null check; get the cast pointer.
  Node* obj = null_check(peek());
  // Bail out if we are locking a null object.
  if (stopped()) return;

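  // Inline types have no identity, so they cannot be locked. The cutout
  // below parses the unlikely path on which the inline-type test fails,
  // ending it in an uncommon trap; normal parsing then resumes on the
  // passing (non-inline) path.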
  {
    // Synchronizing on an inline type is not allowed
    BuildCutout unless(this, inline_type_test(obj, /* is_inline = */ false), PROB_MAX);
    uncommon_trap_exact(Deoptimization::Reason_class_check, Deoptimization::Action_none);
  }

  // The monitor object is not part of the debug info expression stack.
  pop();

  // Insert a FastLockNode which takes as arguments the current thread pointer,
  // the obj pointer & the address of the stack slot pair used for the lock.
  shared_lock(obj);
}

//------------------------------do_monitor_exit--------------------------------
void Parse::do_monitor_exit() {
  kill_dead_locals();

  pop(); // Pop oop to unlock
  // Because monitors are guaranteed paired (else we bail out), we know
  // the matching Lock for this Unlock. Hence there is no need for a
  // null check on Unlock.
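  // The innermost monitor's box node and object sit at the top of the
  // JVM state's monitor stack.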
  shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
}