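    // Stash the runtime call's result; the register restore below clobbers r0.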
    __ mov(rscratch1, r0);

    __ pop_call_clobbered_registers();

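    // A non-zero result means the nmethod needs to be deoptimized.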
    __ cbnz(rscratch1, deoptimize_label);

    __ leave();
    __ ret(lr);

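    // Deoptimize: pop the saved sp/fp and lr/pc pairs, switch to the new sp,
    // and continue execution at the new pc.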
    __ BIND(deoptimize_label);

    __ ldp(/* new sp */ rscratch1, rfp, Address(sp, 0 * wordSize));
    __ ldp(lr, /* new pc */ rscratch2, Address(sp, 2 * wordSize));

    __ mov(sp, rscratch1);
    __ br(rscratch2);

    return start;
  }

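  // Runtime helper stub for fast locking: ensures the current thread's lock
  // stack is large enough (see LockStack::ensure_lock_stack_size).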
  address generate_check_lock_stack() {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", "check_lock_stack");

    address start = __ pc();

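    // Record the last Java frame so the runtime call below can walk the stack.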
    __ set_last_Java_frame(sp, rfp, lr, rscratch1);
    __ enter();
    __ push_call_clobbered_registers();

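    // The stub receives its argument in r9; move it into the first C argument register.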
    __ mov(c_rarg0, r9);
    __ call_VM_leaf(CAST_FROM_FN_PTR(address, LockStack::ensure_lock_stack_size), 1);

    __ pop_call_clobbered_registers();
    __ leave();
    __ reset_last_Java_frame(true);

    __ ret(lr);

    return start;
  }

  // r0 = result
  // r1 = str1
  // r2 = cnt1
  // r3 = str2
  // r4 = cnt2
  // r10 = tmp1
  // r11 = tmp2
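  // Compares two long strings with the same encoding (both latin1 for LL, both
  // UTF-16 for UU) and returns the compareTo-style difference in r0.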
  address generate_compare_long_string_same_encoding(bool isLL) {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", isLL
                          ? "compare_long_string_same_encoding LL"
                          : "compare_long_string_same_encoding UU");
    address entry = __ pc();
    Register result = r0, str1 = r1, cnt1 = r2, str2 = r3, cnt2 = r4,
             tmp1 = r10, tmp2 = r11, tmp1h = rscratch1, tmp2h = rscratch2;

    Label LARGE_LOOP_PREFETCH, LOOP_COMPARE16, DIFF, LESS16, LESS8, CAL_DIFFERENCE, LENGTH_DIFF;

    // exit from the large loop when fewer than 64 bytes are left to read or we
    // are about to prefetch memory behind the array border
  // ...
    // has negatives stub for large arrays.
    StubRoutines::aarch64::_has_negatives = generate_has_negatives(StubRoutines::aarch64::_has_negatives_long);

    // array equals stub for large arrays.
    if (!UseSimpleArrayEquals) {
      StubRoutines::aarch64::_large_array_equals = generate_large_array_equals();
    }

    generate_compare_long_strings();

    generate_string_indexof_stubs();

    // byte_array_inflate stub for large arrays.
    StubRoutines::aarch64::_large_byte_array_inflate = generate_large_byte_array_inflate();

    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (bs_nm != NULL) {
      StubRoutines::aarch64::_method_entry_barrier = generate_method_entry_barrier();
    }
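    // lock stack check stub for the fast-locking slow path.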
    if (UseFastLocking) {
      StubRoutines::aarch64::_check_lock_stack = generate_check_lock_stack();
    }
#ifdef COMPILER2
    if (UseMultiplyToLenIntrinsic) {
      StubRoutines::_multiplyToLen = generate_multiplyToLen();
    }

    if (UseSquareToLenIntrinsic) {
      StubRoutines::_squareToLen = generate_squareToLen();
    }

    if (UseMulAddIntrinsic) {
      StubRoutines::_mulAdd = generate_mulAdd();
    }

    if (UseSIMDForBigIntegerShiftIntrinsics) {
      StubRoutines::_bigIntegerRightShiftWorker = generate_bigIntegerRightShift();
      StubRoutines::_bigIntegerLeftShiftWorker = generate_bigIntegerLeftShift();
    }

    if (UseMontgomeryMultiplyIntrinsic) {
      StubCodeMark mark(this, "StubRoutines", "montgomeryMultiply");