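  // Tail of the nmethod entry barrier stub (registered below via
  // generate_method_entry_barrier): a non-zero result from the barrier
  // runtime call means the nmethod must not be entered, so a replacement
  // sp/fp/lr/pc is loaded from the stub frame and control transfers there;
  // otherwise the stub returns normally.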
    __ mov(rscratch1, r0);

    __ pop_call_clobbered_registers();

    __ cbnz(rscratch1, deoptimize_label);

    __ leave();
    __ ret(lr);

    __ BIND(deoptimize_label);

    __ ldp(/* new sp */ rscratch1, rfp, Address(sp, 0 * wordSize));
    __ ldp(lr, /* new pc */ rscratch2, Address(sp, 2 * wordSize));

    __ mov(sp, rscratch1);
    __ br(rscratch2);

    return start;
  }

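  // Out-of-line helper for the fast-locking path: takes its single argument
  // in r9, forwards it in c_rarg0 to LockStack::ensure_lock_stack_size(),
  // and saves/restores all call-clobbered registers around the runtime call.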
  address generate_check_lock_stack() {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", "check_lock_stack");

    address start = __ pc();

    __ set_last_Java_frame(sp, rfp, lr, rscratch1);
    __ enter();
    __ push_call_clobbered_registers();

    __ mov(c_rarg0, r9);
    __ call_VM_leaf(CAST_FROM_FN_PTR(address, LockStack::ensure_lock_stack_size), 1);

    __ pop_call_clobbered_registers();
    __ leave();
    __ reset_last_Java_frame(true);

    __ ret(lr);

    return start;
  }
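  // A minimal standalone sketch of the mechanism the stub above serves,
  // assuming a growable per-thread lock stack (the struct, field names and
  // the doubling policy below are illustrative only, not HotSpot's LockStack
  // API): the inline fast path pushes into a small array and only calls an
  // out-of-line "ensure size" routine (the role played by the generated stub)
  // when the array is full.
  //
  //   #include <cstdlib>   // realloc, size_t
  //
  //   struct lock_stack_sketch {
  //     void** base;       // slots for currently held monitors
  //     size_t top;        // index of the next free slot
  //     size_t capacity;   // current number of slots
  //
  //     // Slow path, kept out of line: double the backing array.
  //     static void ensure_size(lock_stack_sketch* ls) {
  //       ls->capacity *= 2;
  //       ls->base = static_cast<void**>(
  //           realloc(ls->base, ls->capacity * sizeof(void*)));
  //     }
  //
  //     // Fast path: a single bounds check, then an inline push.
  //     void push(void* obj) {
  //       if (top == capacity) {
  //         ensure_size(this);   // analogous to calling check_lock_stack
  //       }
  //       base[top++] = obj;
  //     }
  //   };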

  // r0  = result
  // r1  = str1
  // r2  = cnt1
  // r3  = str2
  // r4  = cnt2
  // r10 = tmp1
  // r11 = tmp2
  address generate_compare_long_string_same_encoding(bool isLL) {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", isLL
        ? "compare_long_string_same_encoding LL"
        : "compare_long_string_same_encoding UU");
    address entry = __ pc();
    Register result = r0, str1 = r1, cnt1 = r2, str2 = r3, cnt2 = r4,
        tmp1 = r10, tmp2 = r11, tmp1h = rscratch1, tmp2h = rscratch2;

    Label LARGE_LOOP_PREFETCH, LOOP_COMPARE16, DIFF, LESS16, LESS8, CAL_DIFFERENCE, LENGTH_DIFF;

    // exit from large loop when less than 64 bytes left to read or we're about
    // to prefetch memory behind array border
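    // (put differently: the loop that consumes 64 bytes per iteration must be
    // left while at least MAX(64, prefetch distance) bytes remain, so neither
    // the 64-byte block read nor the prefetch issued ahead of it can touch
    // memory past the end of either array)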

  // ...

    // countPositives stub for large arrays.
    StubRoutines::aarch64::_count_positives = generate_count_positives(StubRoutines::aarch64::_count_positives_long);

    // array equals stub for large arrays.
    if (!UseSimpleArrayEquals) {
      StubRoutines::aarch64::_large_array_equals = generate_large_array_equals();
    }

    generate_compare_long_strings();

    generate_string_indexof_stubs();

    // byte_array_inflate stub for large arrays.
    StubRoutines::aarch64::_large_byte_array_inflate = generate_large_byte_array_inflate();

    BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
    if (bs_nm != NULL) {
      StubRoutines::aarch64::_method_entry_barrier = generate_method_entry_barrier();
    }
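    // lock stack growth stub used by the fast-locking scheme
    // (see generate_check_lock_stack() above).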
    if (UseFastLocking) {
      StubRoutines::aarch64::_check_lock_stack = generate_check_lock_stack();
    }
#ifdef COMPILER2
    if (UseMultiplyToLenIntrinsic) {
      StubRoutines::_multiplyToLen = generate_multiplyToLen();
    }

    if (UseSquareToLenIntrinsic) {
      StubRoutines::_squareToLen = generate_squareToLen();
    }

    if (UseMulAddIntrinsic) {
      StubRoutines::_mulAdd = generate_mulAdd();
    }

    if (UseSIMDForBigIntegerShiftIntrinsics) {
      StubRoutines::_bigIntegerRightShiftWorker = generate_bigIntegerRightShift();
      StubRoutines::_bigIntegerLeftShiftWorker = generate_bigIntegerLeftShift();
    }

    if (UseMontgomeryMultiplyIntrinsic) {
      StubCodeMark mark(this, "StubRoutines", "montgomeryMultiply");