 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/assembler.hpp"
#include "asm/assembler.inline.hpp"
#include "opto/c2_CodeStubs.hpp"
#include "opto/c2_MacroAssembler.hpp"
#include "opto/compile.hpp"
#include "opto/output.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/subnode.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/globalDefinitions.hpp"

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#define STOP(error) stop(error)
#else
#define BLOCK_COMMENT(str) block_comment(str)
#define STOP(error) block_comment(error); stop(error)
#endif

#define BIND(label) bind(label); BLOCK_COMMENT(#label ":")

typedef void (MacroAssembler::* chr_insn)(Register Rt, const Address &adr);

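// Fast-path monitor enter for C2 (LM_LEGACY / LM_MONITOR). objectReg holds the
// oop, boxReg the on-stack BasicLock box. On exit the condition flags carry the
// result: EQ means the lock was acquired inline, NE means the caller must take
// the slow path. Clobbers tmpReg, tmp2Reg, rscratch1 and the flags; tmp3Reg is
// not used on this path.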
void C2_MacroAssembler::fast_lock(Register objectReg, Register boxReg, Register tmpReg,
                                  Register tmp2Reg, Register tmp3Reg) {
  Register oop = objectReg;
  Register box = boxReg;
  Register disp_hdr = tmpReg;
  Register tmp = tmp2Reg;
  Label cont;
  Label object_has_monitor;
  Label cas_failed;

  assert(LockingMode != LM_LIGHTWEIGHT, "lightweight locking should use fast_lock_lightweight");
  assert_different_registers(oop, box, tmp, disp_hdr);

  // Load markWord from object into displaced_header.
  ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));

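  // Diagnose synchronization on value-based classes: if the klass has the
  // JVM_ACC_IS_VALUE_BASED_CLASS access flag set, tstw leaves flags == NE and
  // we branch to cont, i.e. take the slow path.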
  if (DiagnoseSyncOnValueBasedClasses != 0) {
    load_klass(tmp, oop);
    ldrw(tmp, Address(tmp, Klass::access_flags_offset()));
    tstw(tmp, JVM_ACC_IS_VALUE_BASED_CLASS);
    br(Assembler::NE, cont);
  }

  if (UseBiasedLocking && !UseOptoBiasInlining) {
    biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
  }

  // Check for existing monitor
  tbnz(disp_hdr, exact_log2(markWord::monitor_value), object_has_monitor);

  if (LockingMode == LM_MONITOR) {
    tst(oop, oop); // Set NE to indicate 'failure' -> take slow-path. We know that oop != 0.
    b(cont);
  } else {
    assert(LockingMode == LM_LEGACY, "must be");
    // Set tmp to be (markWord of object | UNLOCK_VALUE).
    orr(tmp, disp_hdr, markWord::unlocked_value);

    // Initialize the box. (Must happen before we update the object mark!)
    str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));

    // Compare object markWord with an unlocked value (tmp) and if
    // equal exchange the stack address of our box with object markWord.
    // On failure disp_hdr contains the possibly locked markWord.
    cmpxchg(oop, tmp, box, Assembler::xword, /*acquire*/ true,
            /*release*/ true, /*weak*/ false, disp_hdr);
    br(Assembler::EQ, cont);

    assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");

    // If the compare-and-exchange succeeded, then we found an unlocked
    // object, have now locked it, and will continue at label cont.

    bind(cas_failed);
    // We did not see an unlocked object so try the fast recursive case.

    // Check if the owner is self by comparing the value in the
    // markWord of object (disp_hdr) with the stack pointer.
    mov(rscratch1, sp);
    sub(disp_hdr, disp_hdr, rscratch1);
    mov(tmp, (address) (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
    // For a recursive lock the markWord holds a stack address within the current
    // thread's stack, so the difference computed above is less than one page and
    // the lock bits are clear; the masked AND then yields zero. In that case we
    // continue at cont and store 0 as the displaced header in the box, which
    // indicates a recursive lock.
    ands(tmp/*==0?*/, disp_hdr, tmp);   // Sets flags for result
    str(tmp/*==0, perhaps*/, Address(box, BasicLock::displaced_header_offset_in_bytes()));
    b(cont);
  }

  // Handle existing monitor.
  bind(object_has_monitor);

  // The object's monitor m is unlocked iff m->owner == NULL,
  // otherwise m->owner may contain a thread or a stack address.
  //
  // Try to CAS m->owner from NULL to current thread.
  add(tmp, disp_hdr, (ObjectMonitor::owner_offset_in_bytes() - markWord::monitor_value));
  cmpxchg(tmp, zr, rthread, Assembler::xword, /*acquire*/ true,
          /*release*/ true, /*weak*/ false, rscratch1); // Sets flags for result

  // Store a non-null value into the box to avoid looking like a re-entrant
  // lock. The fast-path monitor unlock code checks for
  // markWord::monitor_value so use markWord::unused_mark which has the
  // relevant bit set, and also matches ObjectSynchronizer::enter.
  mov(tmp, (address)markWord::unused_mark().value());
  str(tmp, Address(box, BasicLock::displaced_header_offset_in_bytes()));

  br(Assembler::EQ, cont); // CAS success means locking succeeded

  cmp(rscratch1, rthread);
  br(Assembler::NE, cont); // If owned by another thread, fail with flags == NE

  // Recursive lock case
  increment(Address(disp_hdr, ObjectMonitor::recursions_offset_in_bytes() - markWord::monitor_value), 1);
  // flags are still EQ from the cmp above, which identified this as a reentrant lock

  bind(cont);
  // flag == EQ indicates success
  // flag == NE indicates failure
}

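// Fast-path monitor exit for C2, the counterpart of fast_lock above. On exit
// the condition flags carry the result: EQ means the lock was released inline,
// NE means the caller must take the slow path. Clobbers tmpReg, tmp2Reg,
// rscratch1 and the flags.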
void C2_MacroAssembler::fast_unlock(Register objectReg, Register boxReg, Register tmpReg,
                                    Register tmp2Reg) {
  Register oop = objectReg;
  Register box = boxReg;
  Register disp_hdr = tmpReg;
  Register tmp = tmp2Reg;
  Label cont;
  Label object_has_monitor;

  assert(LockingMode != LM_LIGHTWEIGHT, "lightweight locking should use fast_unlock_lightweight");
  assert_different_registers(oop, box, tmp, disp_hdr);

  if (UseBiasedLocking && !UseOptoBiasInlining) {
    biased_locking_exit(oop, tmp, cont);
  }

  if (LockingMode == LM_LEGACY) {
    // Find the lock address and load the displaced header from the stack.
    ldr(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));

    // If the displaced header is 0, we have a recursive unlock.
    cmp(disp_hdr, zr);
    br(Assembler::EQ, cont);
  }

  // Handle existing monitor.
  ldr(tmp, Address(oop, oopDesc::mark_offset_in_bytes()));
  tbnz(tmp, exact_log2(markWord::monitor_value), object_has_monitor);

  if (LockingMode == LM_MONITOR) {
    tst(oop, oop); // Set NE to indicate 'failure' -> take slow-path. We know that oop != 0.
    b(cont);
  } else {
    assert(LockingMode == LM_LEGACY, "must be");
    // Check if it is still a lightweight lock; this is true if we see the
    // stack address of the basicLock in the markWord of the object.

    cmpxchg(oop, box, disp_hdr, Assembler::xword, /*acquire*/ false,
            /*release*/ true, /*weak*/ false, tmp);
    b(cont);
  }

  assert(oopDesc::mark_offset_in_bytes() == 0, "offset of _mark is not 0");

  // Handle existing monitor.
  bind(object_has_monitor);
  STATIC_ASSERT(markWord::monitor_value <= INT_MAX);
  add(tmp, tmp, -(int)markWord::monitor_value); // monitor

  ldr(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));

  Label notRecursive;
  cbz(disp_hdr, notRecursive);

  // Recursive lock
  sub(disp_hdr, disp_hdr, 1u);
  str(disp_hdr, Address(tmp, ObjectMonitor::recursions_offset_in_bytes()));
  cmp(disp_hdr, disp_hdr); // Sets flags for result
  b(cont);

  bind(notRecursive);
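  // If either the EntryList or the cxq is non-empty there may be threads
  // waiting on the monitor, so take the slow path: the cbnz below branches
  // to cont with flags == NE from the cmp. Otherwise the monitor can be
  // released by storing null (zr) into its owner field.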
  ldr(rscratch1, Address(tmp, ObjectMonitor::EntryList_offset_in_bytes()));
  ldr(disp_hdr, Address(tmp, ObjectMonitor::cxq_offset_in_bytes()));
  orr(rscratch1, rscratch1, disp_hdr); // Will be 0 if both are 0.
  cmp(rscratch1, zr); // Sets flags for result
  cbnz(rscratch1, cont);
  // need a release store here
  lea(tmp, Address(tmp, ObjectMonitor::owner_offset_in_bytes()));
  stlr(zr, tmp); // set unowned

  bind(cont);
  // flag == EQ indicates success
  // flag == NE indicates failure
}

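// Fast-path lock for LM_LIGHTWEIGHT: try to push the object onto the current
// thread's lock-stack, or to enter an already inflated monitor directly. On
// exit the condition flags carry the result: EQ means locked, NE means the
// caller must take the slow path. Clobbers t1, t2, t3 and the flags.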
void C2_MacroAssembler::fast_lock_lightweight(Register obj, Register t1,
                                              Register t2, Register t3) {
  assert(LockingMode == LM_LIGHTWEIGHT, "must be");
  assert_different_registers(obj, t1, t2, t3);

  // Handle inflated monitor.
  Label inflated;
  // Finish fast lock successfully. MUST be reached with flags == EQ.
  Label locked;
  // Finish fast lock unsuccessfully. MUST be reached with flags == NE.
  Label slow_path;

  if (DiagnoseSyncOnValueBasedClasses != 0) {
    load_klass(t1, obj);
    ldrw(t1, Address(t1, Klass::access_flags_offset()));
    tstw(t1, JVM_ACC_IS_VALUE_BASED_CLASS);
    br(Assembler::NE, slow_path);
  }

  const Register t1_mark = t1;

  { // Lightweight locking

    // Push the lock onto the lock-stack and finish successfully. MUST be reached with flags == EQ.
    Label push;

    const Register t2_top = t2;
    const Register t3_t = t3;

    // Check if lock-stack is full.
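    // The lock-stack top is kept as a byte offset from rthread, so it can be
    // used directly as the addressing offset below; the stack is full once
    // top >= LockStack::end_offset().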
    ldrw(t2_top, Address(rthread, JavaThread::lock_stack_top_offset()));
    cmpw(t2_top, (unsigned)LockStack::end_offset() - 1);
    br(Assembler::GT, slow_path);

    // Check if recursive.
    subw(t3_t, t2_top, oopSize);
    ldr(t3_t, Address(rthread, t3_t));
    cmp(obj, t3_t);
    br(Assembler::EQ, push);

    // Relaxed normal load to check for monitor. Optimization for monitor case.
    ldr(t1_mark, Address(obj, oopDesc::mark_offset_in_bytes()));
    tbnz(t1_mark, exact_log2(markWord::monitor_value), inflated);

    // Not inflated
    assert(oopDesc::mark_offset_in_bytes() == 0, "required to avoid a lea");

    // Try to lock. Transition lock-bits 0b01 => 0b00
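    // orr builds the expected (unlocked, lock bits 0b01) mark word in t1_mark;
    // eor then clears the unlocked bit to form the fast-locked (0b00) value in t3_t.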
    orr(t1_mark, t1_mark, markWord::unlocked_value);
    eor(t3_t, t1_mark, markWord::unlocked_value);
    cmpxchg(/*addr*/ obj, /*expected*/ t1_mark, /*new*/ t3_t, Assembler::xword,
            /*acquire*/ true, /*release*/ false, /*weak*/ false, noreg);
    br(Assembler::NE, slow_path);

    bind(push);
    // After successful lock, push object on lock-stack.
    str(obj, Address(rthread, t2_top));
    addw(t2_top, t2_top, oopSize);
    strw(t2_top, Address(rthread, JavaThread::lock_stack_top_offset()));
    b(locked);
  }

  { // Handle inflated monitor.
    bind(inflated);

    // mark contains the tagged ObjectMonitor*.
    const Register t1_tagged_monitor = t1_mark;
    const uintptr_t monitor_tag = markWord::monitor_value;
    const Register t2_owner_addr = t2;
    const Register t3_owner = t3;

    // Compute owner address.
    lea(t2_owner_addr, Address(t1_tagged_monitor, ObjectMonitor::owner_offset_in_bytes() - monitor_tag));

    // CAS owner (null => current thread).
    cmpxchg(t2_owner_addr, zr, rthread, Assembler::xword, /*acquire*/ true,
            /*release*/ false, /*weak*/ false, t3_owner);
    br(Assembler::EQ, locked);

    // Check if recursive.
    cmp(t3_owner, rthread);
    br(Assembler::NE, slow_path);

    // Recursive.
    increment(Address(t1_tagged_monitor, ObjectMonitor::recursions_offset_in_bytes() - monitor_tag), 1);
  }

  bind(locked);
#ifdef ASSERT
  // Check that locked label is reached with Flags == EQ.
  Label flag_correct;
  br(Assembler::EQ, flag_correct);
  stop("Fast Lock Flag != EQ");
#endif

  bind(slow_path);
#ifdef ASSERT
  // Check that slow_path label is reached with Flags == NE.
  br(Assembler::NE, flag_correct);
  stop("Fast Lock Flag != NE");
  bind(flag_correct);
#endif
  // C2 uses the value of Flags (NE vs EQ) to determine the continuation.
}

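// Fast-path unlock for LM_LIGHTWEIGHT: pop the object from the current
// thread's lock-stack, or exit an inflated monitor directly. On exit the
// condition flags carry the result: EQ means unlocked, NE means the caller
// must take the slow path. Clobbers t1, t2, t3, rscratch1 and the flags.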
void C2_MacroAssembler::fast_unlock_lightweight(Register obj, Register t1, Register t2,
                                                Register t3) {
  assert(LockingMode == LM_LIGHTWEIGHT, "must be");
  assert_different_registers(obj, t1, t2, t3);

  // Handle inflated monitor.
  Label inflated, inflated_load_monitor;
  // Finish fast unlock successfully. MUST be reached with flags == EQ.
  Label unlocked;
  // Finish fast unlock unsuccessfully. MUST be reached with flags == NE.
  Label slow_path;
341
342 const Register t1_mark = t1;
343 const Register t2_top = t2;
344 const Register t3_t = t3;
345
346 { // Lightweight unlock
347
348 // Check if obj is top of lock-stack.
349 ldrw(t2_top, Address(rthread, JavaThread::lock_stack_top_offset()));
350 subw(t2_top, t2_top, oopSize);
351 ldr(t3_t, Address(rthread, t2_top));
352 cmp(obj, t3_t);
353 // Top of lock stack was not obj. Must be monitor.
354 br(Assembler::NE, inflated_load_monitor);
355
356 // Pop lock-stack.
357 DEBUG_ONLY(str(zr, Address(rthread, t2_top));)
358 strw(t2_top, Address(rthread, JavaThread::lock_stack_top_offset()));
359
360 // Check if recursive.
361 subw(t3_t, t2_top, oopSize);
362 ldr(t3_t, Address(rthread, t3_t));
363 cmp(obj, t3_t);
364 br(Assembler::EQ, unlocked);
365
366 // Not recursive.
367 // Load Mark.
368 ldr(t1_mark, Address(obj, oopDesc::mark_offset_in_bytes()));
369
370 // Check header for monitor (0b10).
371 tbnz(t1_mark, exact_log2(markWord::monitor_value), inflated);
372
373 // Try to unlock. Transition lock bits 0b00 => 0b01
374 assert(oopDesc::mark_offset_in_bytes() == 0, "required to avoid lea");
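    // t1_mark still holds the current (fast-locked, lock bits 0b00) mark word;
    // orr sets the unlocked bit to form the new (0b01) value in t3_t.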
    orr(t3_t, t1_mark, markWord::unlocked_value);
    cmpxchg(/*addr*/ obj, /*expected*/ t1_mark, /*new*/ t3_t, Assembler::xword,
            /*acquire*/ false, /*release*/ true, /*weak*/ false, noreg);
    br(Assembler::EQ, unlocked);

    // Compare and exchange failed.
    // Restore lock-stack and handle the unlock in runtime.
    DEBUG_ONLY(str(obj, Address(rthread, t2_top));)
    addw(t2_top, t2_top, oopSize);
    strw(t2_top, Address(rthread, JavaThread::lock_stack_top_offset())); // 32-bit store, matching the strw above
    b(slow_path);
  }

  { // Handle inflated monitor.
    bind(inflated_load_monitor);
    ldr(t1_mark, Address(obj, oopDesc::mark_offset_in_bytes()));
#ifdef ASSERT
    tbnz(t1_mark, exact_log2(markWord::monitor_value), inflated);
    stop("Fast Unlock not monitor");
#endif

    bind(inflated);

#ifdef ASSERT
    Label check_done;
    subw(t2_top, t2_top, oopSize);
    cmpw(t2_top, in_bytes(JavaThread::lock_stack_base_offset()));
    br(Assembler::LT, check_done);
    ldr(t3_t, Address(rthread, t2_top));
    cmp(obj, t3_t);
    br(Assembler::NE, inflated);
    stop("Fast Unlock lock on stack");
    bind(check_done);
#endif

    // mark contains the tagged ObjectMonitor*.
    const Register t1_monitor = t1_mark;
    const uintptr_t monitor_tag = markWord::monitor_value;

    // Untag the monitor.
    sub(t1_monitor, t1_mark, monitor_tag);

    const Register t2_recursions = t2;
    Label not_recursive;

    // Check if recursive.
    ldr(t2_recursions, Address(t1_monitor, ObjectMonitor::recursions_offset_in_bytes()));
    cbz(t2_recursions, not_recursive);

    // Recursive unlock.
    sub(t2_recursions, t2_recursions, 1u);
    str(t2_recursions, Address(t1_monitor, ObjectMonitor::recursions_offset_in_bytes()));
    // Set flag == EQ
    cmp(t2_recursions, t2_recursions);
    b(unlocked);

    bind(not_recursive);

    Label release;
    const Register t2_owner_addr = t2;

    // Compute owner address.
    lea(t2_owner_addr, Address(t1_monitor, ObjectMonitor::owner_offset_in_bytes()));

    // Check if the entry lists are empty.
    ldr(rscratch1, Address(t1_monitor, ObjectMonitor::EntryList_offset_in_bytes()));
    ldr(t3_t, Address(t1_monitor, ObjectMonitor::cxq_offset_in_bytes()));
    orr(rscratch1, rscratch1, t3_t);
    cmp(rscratch1, zr);
    br(Assembler::EQ, release);

    // The owner may be anonymous and we removed the last obj entry in
    // the lock-stack. This loses the information about the owner.
    // Write the thread to the owner field so the runtime knows the owner.
    str(rthread, Address(t2_owner_addr));
    b(slow_path);

    bind(release);
    // Set owner to null.
    // Release to satisfy the JMM
    stlr(zr, t2_owner_addr);
  }

  bind(unlocked);
#ifdef ASSERT
  // Check that unlocked label is reached with Flags == EQ.
  Label flag_correct;
  br(Assembler::EQ, flag_correct);
  stop("Fast Unlock Flag != EQ");
#endif

  bind(slow_path);
#ifdef ASSERT
  // Check that slow_path label is reached with Flags == NE.
  br(Assembler::NE, flag_correct);
  stop("Fast Unlock Flag != NE");
  bind(flag_correct);
#endif
  // C2 uses the value of Flags (NE vs EQ) to determine the continuation.
}

// Search for str1 in str2 and return index or -1
// Clobbers: rscratch1, rscratch2, rflags. May also clobber v0-v1, when icnt1==-1.
void C2_MacroAssembler::string_indexof(Register str2, Register str1,
                                       Register cnt2, Register cnt1,
                                       Register tmp1, Register tmp2,
                                       Register tmp3, Register tmp4,
                                       Register tmp5, Register tmp6,
                                       int icnt1, Register result, int ae) {
  // NOTE: tmp5, tmp6 can be zr depending on specific method version
  Label LINEARSEARCH, LINEARSTUB, LINEAR_MEDIUM, DONE, NOMATCH, MATCH;

  Register ch1 = rscratch1;
  Register ch2 = rscratch2;
  Register cnt1tmp = tmp1;
  Register cnt2tmp = tmp2;
  Register cnt1_neg = cnt1;
  Register cnt2_neg = cnt2;
  Register result_tmp = tmp4;

  bool isL = ae == StrIntrinsicNode::LL;
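      // AArch64 NEON provides register-register compares only for the eq/ge/gt
      // (and unsigned hs/hi) forms, so the lt/le/ult/ule cases below are formed
      // by swapping the source operands of the corresponding gt/ge/hi/hs compare.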
      case BoolTest::eq: cmeq(dst, size, src1, src2); break;
      case BoolTest::ne: {
        cmeq(dst, size, src1, src2);
        notr(dst, T16B, dst);
        break;
      }
      case BoolTest::ge: cmge(dst, size, src1, src2); break;
      case BoolTest::gt: cmgt(dst, size, src1, src2); break;
      case BoolTest::le: cmge(dst, size, src2, src1); break;
      case BoolTest::lt: cmgt(dst, size, src2, src1); break;
      case BoolTest::uge: cmhs(dst, size, src1, src2); break;
      case BoolTest::ugt: cmhi(dst, size, src1, src2); break;
      case BoolTest::ult: cmhi(dst, size, src2, src1); break;
      case BoolTest::ule: cmhs(dst, size, src2, src1); break;
      default:
        assert(false, "unsupported");
        ShouldNotReachHere();
    }
  }
}

void C2_MacroAssembler::load_nklass_compact(Register dst, Register obj, Register index, int scale, int disp) {
  C2LoadNKlassStub* stub = new (Compile::current()->comp_arena()) C2LoadNKlassStub(dst);
  Compile::current()->output()->add_stub(stub);

  // Note: Don't clobber obj anywhere in this method!

  // The incoming address points to obj-start + klass_offset_in_bytes. We need to extract
  // obj-start so that we can load from the object's mark-word instead. Usually the address
  // comes as obj-start in obj and klass_offset_in_bytes in disp. However, sometimes C2
  // emits code that pre-computes obj-start + klass_offset_in_bytes into a register and
  // then passes that register as obj and 0 in disp. The following code extracts the base
  // and offset needed to load the mark-word.
  int offset = oopDesc::mark_offset_in_bytes() + disp - oopDesc::klass_offset_in_bytes();
  if (index == noreg) {
    ldr(dst, Address(obj, offset));
  } else {
    lea(dst, Address(obj, index, Address::lsl(scale)));
    ldr(dst, Address(dst, offset));
  }
  // NOTE: We can't use tbnz here, because the target is sometimes too far away
  // and cannot be encoded.
  tst(dst, markWord::monitor_value);
  br(Assembler::NE, stub->entry());
  bind(stub->continuation());
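  // dst now holds a mark word (the out-of-line stub handles the monitor-locked
  // case). With compact object headers the narrow Klass* sits in the upper bits
  // of the mark word, so shift it down into dst.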
  lsr(dst, dst, markWord::klass_shift);
}