    // unlocks reference only this one object.
  }
#endif
  // Report the unique lock through the optional out-parameter when exactly
  // one Lock was found for this box.
  if (unique_lock != nullptr && has_one_lock) {
    *unique_lock = lock;
  }
  return true;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastLockNode::hash() const { return NO_HASH; }

uint FastLockNode::size_of() const { return sizeof(*this); }

//------------------------------cmp--------------------------------------------
bool FastLockNode::cmp( const Node &n ) const {
  return (&n == this);                // Always fail except on self
}
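// Note: NO_HASH keeps FastLock nodes out of the GVN hash table and cmp()
// matches a node only against itself, so two distinct lock sites are never
// value-numbered into one node. size_of() reports the subclass size so that
// node cloning copies the whole FastLockNode, not just the Node base.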

const Type* FastLockNode::Value(PhaseGVN* phase) const {
  const Type* in1_t = phase->type(in(1));
  if (in1_t == Type::TOP) {
    return Type::TOP;
  }
  if (in1_t->is_inlinetypeptr()) {
    // Locking on an inline type always fails
    return TypeInt::CC_GT;
  }
  return TypeInt::CC;
}
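// A sketch of why CC_GT is returned above: FastLock produces a condition-code
// value that is consumed by a Bool/If choosing between the fast and slow
// locking paths. The unconstrained TypeInt::CC leaves both outcomes possible,
// while the constant CC_GT lets that test constant-fold to the failing
// outcome, so locking on an inline type unconditionally takes the slow path.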

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastUnlockNode::hash() const { return NO_HASH; }

//------------------------------cmp--------------------------------------------
bool FastUnlockNode::cmp( const Node &n ) const {
  return (&n == this);                // Always fail except on self
}
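// Same GVN opt-out as FastLockNode above: each FastUnlock stays unique.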

//=============================================================================
//------------------------------do_monitor_enter-------------------------------
void Parse::do_monitor_enter() {
  kill_dead_locals();

  // Null check; get casted pointer.
  Node* obj = null_check(peek());
  // Check for locking a null object
  if (stopped()) return;

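  // BuildCutout splits control on the test below: the passing path (obj is
  // not an inline type) continues after this scope, while code emitted
  // inside the scope, here an uncommon trap, lands on the failing path.
  // PROB_MAX marks the passing path as the expected one.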
  {
    // Synchronizing on an inline type is not allowed
    BuildCutout unless(this, inline_type_test(obj, /* is_inline = */ false), PROB_MAX);
    uncommon_trap_exact(Deoptimization::Reason_class_check, Deoptimization::Action_none);
  }

  // The monitor object is not part of the debug info expression stack.
  pop();

  // Insert a FastLockNode which takes as arguments the current thread pointer,
  // the obj pointer & the address of the stack slot pair used for the lock.
  shared_lock(obj);
}

//------------------------------do_monitor_exit--------------------------------
void Parse::do_monitor_exit() {
  kill_dead_locals();

  pop();                  // Pop oop to unlock
  // Because monitors are guaranteed paired (else we bail out), we know
  // the matching Lock for this Unlock. Hence we know there is no need
  // for a null check on Unlock.
  shared_unlock(map()->peek_monitor_box(), map()->peek_monitor_obj());
}
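// For reference, a minimal sketch of the bytecode shape these parsers handle,
// assuming the usual javac translation: a Java block
//   synchronized (obj) { body(); }
// compiles to a monitorenter on obj, the body, and paired monitorexit
// bytecodes on both the normal and exceptional exits. The parser relies on
// that pairing; unbalanced monitors make the compile bail out instead.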