    // unlocks reference only this one object.
  }
#endif
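  // If the caller asked for it and this region is governed by exactly one
  // Lock node, report that unique Lock back through the out parameter.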
  if (unique_lock != nullptr && has_one_lock) {
    *unique_lock = lock;
  }
  return true;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
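// FastLock nodes must never be commoned by GVN: each one guards its own
// monitor region. Returning NO_HASH keeps the node out of the value-number
// table, and cmp() below matches only the very same node.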
uint FastLockNode::hash() const { return NO_HASH; }

uint FastLockNode::size_of() const { return sizeof(*this); }

//------------------------------cmp--------------------------------------------
bool FastLockNode::cmp( const Node &n ) const {
  return (&n == this); // Always fail except on self
}

//------------------------------Value------------------------------------------
const Type* FastLockNode::Value(PhaseGVN* phase) const {
  const Type* in1_t = phase->type(in(1));
  if (in1_t == Type::TOP) {
    return Type::TOP;
  }
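  // The node's value is a condition code that macro expansion later tests
  // against zero (zero = fast-path lock acquired). A constant CC_GT folds
  // that test away: locking an inline type can never succeed, so only the
  // slow, always-throwing path survives.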
  if (in1_t->is_inlinetypeptr()) {
    // Locking on inline types always fails
    return TypeInt::CC_GT;
  }
  return TypeInt::CC;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
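// FastUnlock follows the same rule as FastLock above: NO_HASH and an
// identity-only cmp() keep distinct unlock nodes from being commoned.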
uint FastUnlockNode::hash() const { return NO_HASH; }

//------------------------------cmp--------------------------------------------
bool FastUnlockNode::cmp( const Node &n ) const {
  return (&n == this); // Always fail except on self
}

//=============================================================================
//------------------------------do_monitor_enter-------------------------------
void Parse::do_monitor_enter() {
  kill_dead_locals();

  // Null check; get casted pointer.
  Node* obj = null_check(peek());
  // Check for locking null object
  if (stopped()) return;

  {
    // Synchronizing on an inline type is not allowed
    BuildCutout unless(this, inline_type_test(obj, /* is_inline = */ false), PROB_MAX);
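    // Within the cutout scope, control sits on the unlikely failing side of
    // the test (obj is an inline type): the trap below is emitted there,
    // while the expected path continues after the scope.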
    uncommon_trap_exact(Deoptimization::Reason_class_check, Deoptimization::Action_none);
  }

  // The monitor object is not part of the debug info expression stack.
  pop();

  // Insert a FastLockNode which takes as arguments the current thread pointer,
  // the obj pointer & the address of the stack slot pair used for the lock.
  shared_lock(obj);
}

//------------------------------do_monitor_exit--------------------------------
void Parse::do_monitor_exit() {
  kill_dead_locals();

  pop(); // Pop oop to unlock
  // Because monitors are guaranteed paired (else we bail out), we know
  // the matching Lock for this Unlock. Hence we know there is no need
  // for a null check on Unlock.
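  // peek_monitor_box()/peek_monitor_obj() read the innermost monitor pair
  // (the BoxLock and the locked oop) recorded in the current JVM state by
  // the matching monitorenter.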
  shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
}