< prev index next >

src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp

Print this page

 300     //      rmethod: Method*
 301     //      r19_sender_sp: sender sp
 302     BLOCK_COMMENT("call Java function");
 303     __ mov(r19_sender_sp, sp);
 304     __ blr(c_rarg4);
 305 
 306     // we do this here because the notify will already have been done
 307     // if we get to the next instruction via an exception
 308     //
 309     // n.b. adding this instruction here affects the calculation of
 310     // whether or not a routine returns to the call stub (used when
 311     // doing stack walks) since the normal test is to check the return
 312     // pc against the address saved below. so we may need to allow for
 313     // this extra instruction in the check.
 314 
 315     // save current address for use by exception handling code
 316 
 317     return_address = __ pc();
 318 
 319     // store result depending on type (everything that is not
 320     // T_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 321     // n.b. this assumes Java returns an integral result in r0
 322     // and a floating result in j_farg0
 323     __ ldr(j_rarg2, result);
 324     Label is_long, is_float, is_double, exit;
 325     __ ldr(j_rarg1, result_type);
 326     __ cmp(j_rarg1, (u1)T_OBJECT);









 327     __ br(Assembler::EQ, is_long);
 328     __ cmp(j_rarg1, (u1)T_LONG);
 329     __ br(Assembler::EQ, is_long);
 330     __ cmp(j_rarg1, (u1)T_FLOAT);
 331     __ br(Assembler::EQ, is_float);
 332     __ cmp(j_rarg1, (u1)T_DOUBLE);
 333     __ br(Assembler::EQ, is_double);
 334 
 335     // handle T_INT case
 336     __ strw(r0, Address(j_rarg2));
 337 
 338     __ BIND(exit);
 339 
 340     // pop parameters
 341     __ sub(esp, rfp, -sp_after_call_off * wordSize);
 342 
 343 #ifdef ASSERT
 344     // verify that threads correspond
 345     {
 346       Label L, S;
 347       __ ldr(rscratch1, thread);
 348       __ cmp(rthread, rscratch1);
 349       __ br(Assembler::NE, S);
 350       __ get_thread(rscratch1);
 351       __ cmp(rthread, rscratch1);
 352       __ br(Assembler::EQ, L);
 353       __ BIND(S);
 354       __ stop("StubRoutines::call_stub: threads must correspond");
 355       __ BIND(L);
 356     }

 364     __ ldpd(v11, v10,  d11_save);
 365     __ ldpd(v9,  v8,   d9_save);
 366 
 367     __ ldp(r28, r27,   r28_save);
 368     __ ldp(r26, r25,   r26_save);
 369     __ ldp(r24, r23,   r24_save);
 370     __ ldp(r22, r21,   r22_save);
 371     __ ldp(r20, r19,   r20_save);
 372 
 373     __ ldp(c_rarg0, c_rarg1,  call_wrapper);
 374     __ ldrw(c_rarg2, result_type);
 375     __ ldr(c_rarg3,  method);
 376     __ ldp(c_rarg4, c_rarg5,  entry_point);
 377     __ ldp(c_rarg6, c_rarg7,  parameter_size);
 378 
 379     // leave frame and return to caller
 380     __ leave();
 381     __ ret(lr);
 382 
 383     // handle return types different from T_INT











 384 
 385     __ BIND(is_long);
 386     __ str(r0, Address(j_rarg2, 0));
 387     __ br(Assembler::AL, exit);
 388 
 389     __ BIND(is_float);
 390     __ strs(j_farg0, Address(j_rarg2, 0));
 391     __ br(Assembler::AL, exit);
 392 
 393     __ BIND(is_double);
 394     __ strd(j_farg0, Address(j_rarg2, 0));
 395     __ br(Assembler::AL, exit);
 396 
 397     return start;
 398   }
 399 
 400   // Return point for a Java call if there's an exception thrown in
 401   // Java code.  The exception is caught and transformed into a
 402   // pending exception stored in JavaThread that can be tested from
 403   // within the VM.
 404   //
 405   // Note: Usually the parameters are removed by the callee. In case
 406   // of an exception crossing an activation frame boundary, that is
 407   // not the case if the callee is compiled code => need to setup the
 408   // rsp.
 409   //
 410   // r0: exception oop
 411 
 412   address generate_catch_exception() {
 413     StubCodeMark mark(this, "StubRoutines", "catch_exception");
 414     address start = __ pc();

2083     //  |array_tag|     | header_size | element_type |     |log2_element_size|
2084     // 32        30    24            16              8     2                 0
2085     //
2086     //   array_tag: typeArray = 0x3, objArray = 0x2, non-array = 0x0
2087     //
2088 
2089     const int lh_offset = in_bytes(Klass::layout_helper_offset());
2090 
2091     // Handle objArrays completely differently...
2092     const jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
2093     __ ldrw(lh, Address(scratch_src_klass, lh_offset));
2094     __ movw(rscratch1, objArray_lh);
2095     __ eorw(rscratch2, lh, rscratch1);
2096     __ cbzw(rscratch2, L_objArray);
2097 
2098     //  if (src->klass() != dst->klass()) return -1;
2099     __ load_klass(rscratch2, dst);
2100     __ eor(rscratch2, rscratch2, scratch_src_klass);
2101     __ cbnz(rscratch2, L_failed);
2102 








2103     //  if (!src->is_Array()) return -1;
2104     __ tbz(lh, 31, L_failed);  // i.e. (lh >= 0)
2105 
2106     // At this point, it is known to be a typeArray (array_tag 0x3).
2107 #ifdef ASSERT
2108     {
2109       BLOCK_COMMENT("assert primitive array {");
2110       Label L;
2111       __ movw(rscratch2, Klass::_lh_array_tag_type_value << Klass::_lh_array_tag_shift);
2112       __ cmpw(lh, rscratch2);
2113       __ br(Assembler::GE, L);
2114       __ stop("must be a primitive array");
2115       __ bind(L);
2116       BLOCK_COMMENT("} assert primitive array done");
2117     }
2118 #endif
2119 
2120     arraycopy_range_checks(src, src_pos, dst, dst_pos, scratch_length,
2121                            rscratch2, L_failed);
2122 

7745     //       MACC(Ra, Ra, t0, t1, t2);
7746     //     }
7747     //     iters =  (2*len-i)/2;
7748     //     assert(iters == len-j, "must be");
7749     //     for (; iters--; j++) {
7750     //       assert(Rm == Pm_base[j] && Rn == Pn_base[i-j], "must be");
7751     //       MACC(Rm, Rn, t0, t1, t2);
7752     //       Rm = *++Pm;
7753     //       Rn = *--Pn;
7754     //     }
7755     //     Pm_base[i-len] = t0;
7756     //     t0 = t1; t1 = t2; t2 = 0;
7757     //   }
7758 
7759     //   while (t0)
7760     //     t0 = sub(Pm_base, Pn_base, t0, len);
7761     // }
7762   };
7763 
7764 
































































































































7765   // Initialization
7766   void generate_initial() {
7767     // Generate initial stubs and initializes the entry points
7768 
7769     // entry points that exist in all platforms Note: This is code
7770     // that could be shared among different platforms - however the
7771     // benefit seems to be smaller than the disadvantage of having a
7772     // much more complicated generator structure. See also comment in
7773     // stubRoutines.hpp.
7774 
7775     StubRoutines::_forward_exception_entry = generate_forward_exception();
7776 
7777     StubRoutines::_call_stub_entry =
7778       generate_call_stub(StubRoutines::_call_stub_return_address);
7779 
7780     // is referenced by megamorphic call
7781     StubRoutines::_catch_exception_entry = generate_catch_exception();
7782 
7783     // Build this early so it's available for the interpreter.
7784     StubRoutines::_throw_StackOverflowError_entry =

7794       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7795       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7796     }
7797 
7798     if (UseCRC32CIntrinsics) {
7799       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7800     }
7801 
7802     // Disabled until JDK-8210858 is fixed
7803     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7804     //   StubRoutines::_dlog = generate_dlog();
7805     // }
7806 
7807     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7808       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7809     }
7810 
7811     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7812       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
7813     }







7814   }
7815 
7816   void generate_phase1() {
         // Generates the second batch of stubs: the continuation
         // (thaw / return-barrier) entry points and, when JFR is built in,
         // the JFR checkpoint stub.
7817     // Continuation stubs:
7818     StubRoutines::_cont_thaw          = generate_cont_thaw();
7819     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
7820     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
7821
         // JFR_ONLY(...) expands to its argument only in builds that include
         // JFR support; otherwise these two statements compile away.
         // The second line caches the stub's entry point for direct calls.
7822     JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
7823     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
7824   }
7825 
7826   void generate_all() {
7827     // support for verify_oop (must happen after universe_init)
7828     if (VerifyOops) {
7829       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7830     }
7831     StubRoutines::_throw_AbstractMethodError_entry =
7832       generate_throw_exception("AbstractMethodError throw_exception",
7833                                CAST_FROM_FN_PTR(address,

 300     //      rmethod: Method*
 301     //      r19_sender_sp: sender sp
 302     BLOCK_COMMENT("call Java function");
 303     __ mov(r19_sender_sp, sp);
 304     __ blr(c_rarg4);
 305 
 306     // we do this here because the notify will already have been done
 307     // if we get to the next instruction via an exception
 308     //
 309     // n.b. adding this instruction here affects the calculation of
 310     // whether or not a routine returns to the call stub (used when
 311     // doing stack walks) since the normal test is to check the return
 312     // pc against the address saved below. so we may need to allow for
 313     // this extra instruction in the check.
 314 
 315     // save current address for use by exception handling code
 316 
 317     return_address = __ pc();
 318 
 319     // store result depending on type (everything that is not
 320     // T_OBJECT, T_PRIMITIVE_OBJECT, T_LONG, T_FLOAT or T_DOUBLE is treated as T_INT)
 321     // n.b. this assumes Java returns an integral result in r0
 322     // and a floating result in j_farg0
 323     // All of j_rargN may be used to return inline type fields so be careful
 324     // not to clobber those.
 325     // SharedRuntime::generate_buffered_inline_type_adapter() knows the register
 326     // assignment of Rresult below.
 327     Register Rresult = r14, Rresult_type = r15;
 328     __ ldr(Rresult, result);
 329     Label is_long, is_float, is_double, check_prim, exit;
 330     __ ldr(Rresult_type, result_type);
 331     __ cmp(Rresult_type, (u1)T_OBJECT);
 332     __ br(Assembler::EQ, check_prim);
 333     __ cmp(Rresult_type, (u1)T_PRIMITIVE_OBJECT);
 334     __ br(Assembler::EQ, check_prim);
 335     __ cmp(Rresult_type, (u1)T_LONG);
 336     __ br(Assembler::EQ, is_long);
 337     __ cmp(Rresult_type, (u1)T_FLOAT);


 338     __ br(Assembler::EQ, is_float);
 339     __ cmp(Rresult_type, (u1)T_DOUBLE);
 340     __ br(Assembler::EQ, is_double);
 341 
 342     // handle T_INT case
 343     __ strw(r0, Address(Rresult));
 344 
 345     __ BIND(exit);
 346 
 347     // pop parameters
 348     __ sub(esp, rfp, -sp_after_call_off * wordSize);
 349 
 350 #ifdef ASSERT
 351     // verify that threads correspond
 352     {
 353       Label L, S;
 354       __ ldr(rscratch1, thread);
 355       __ cmp(rthread, rscratch1);
 356       __ br(Assembler::NE, S);
 357       __ get_thread(rscratch1);
 358       __ cmp(rthread, rscratch1);
 359       __ br(Assembler::EQ, L);
 360       __ BIND(S);
 361       __ stop("StubRoutines::call_stub: threads must correspond");
 362       __ BIND(L);
 363     }

 371     __ ldpd(v11, v10,  d11_save);
 372     __ ldpd(v9,  v8,   d9_save);
 373 
 374     __ ldp(r28, r27,   r28_save);
 375     __ ldp(r26, r25,   r26_save);
 376     __ ldp(r24, r23,   r24_save);
 377     __ ldp(r22, r21,   r22_save);
 378     __ ldp(r20, r19,   r20_save);
 379 
 380     __ ldp(c_rarg0, c_rarg1,  call_wrapper);
 381     __ ldrw(c_rarg2, result_type);
 382     __ ldr(c_rarg3,  method);
 383     __ ldp(c_rarg4, c_rarg5,  entry_point);
 384     __ ldp(c_rarg6, c_rarg7,  parameter_size);
 385 
 386     // leave frame and return to caller
 387     __ leave();
 388     __ ret(lr);
 389 
 390     // handle return types different from T_INT
 391     __ BIND(check_prim);
 392     if (InlineTypeReturnedAsFields) {
 393       // Check for scalarized return value
 394       __ tbz(r0, 0, is_long);
 395       // Load pack handler address
 396       __ andr(rscratch1, r0, -2);
 397       __ ldr(rscratch1, Address(rscratch1, InstanceKlass::adr_inlineklass_fixed_block_offset()));
 398       __ ldr(rscratch1, Address(rscratch1, InlineKlass::pack_handler_jobject_offset()));
 399       __ blr(rscratch1);
 400       __ b(exit);
 401     }
 402 
 403     __ BIND(is_long);
 404     __ str(r0, Address(Rresult, 0));
 405     __ br(Assembler::AL, exit);
 406 
 407     __ BIND(is_float);
 408     __ strs(j_farg0, Address(Rresult, 0));
 409     __ br(Assembler::AL, exit);
 410 
 411     __ BIND(is_double);
 412     __ strd(j_farg0, Address(Rresult, 0));
 413     __ br(Assembler::AL, exit);
 414 
 415     return start;
 416   }
 417 
 418   // Return point for a Java call if there's an exception thrown in
 419   // Java code.  The exception is caught and transformed into a
 420   // pending exception stored in JavaThread that can be tested from
 421   // within the VM.
 422   //
 423   // Note: Usually the parameters are removed by the callee. In case
 424   // of an exception crossing an activation frame boundary, that is
 425   // not the case if the callee is compiled code => need to setup the
 426   // rsp.
 427   //
 428   // r0: exception oop
 429 
 430   address generate_catch_exception() {
 431     StubCodeMark mark(this, "StubRoutines", "catch_exception");
 432     address start = __ pc();

2101     //  |array_tag|     | header_size | element_type |     |log2_element_size|
2102     // 32        30    24            16              8     2                 0
2103     //
2104     //   array_tag: typeArray = 0x3, objArray = 0x2, non-array = 0x0
2105     //
2106 
2107     const int lh_offset = in_bytes(Klass::layout_helper_offset());
2108 
2109     // Handle objArrays completely differently...
2110     const jint objArray_lh = Klass::array_layout_helper(T_OBJECT);
2111     __ ldrw(lh, Address(scratch_src_klass, lh_offset));
2112     __ movw(rscratch1, objArray_lh);
2113     __ eorw(rscratch2, lh, rscratch1);
2114     __ cbzw(rscratch2, L_objArray);
2115 
2116     //  if (src->klass() != dst->klass()) return -1;
2117     __ load_klass(rscratch2, dst);
2118     __ eor(rscratch2, rscratch2, scratch_src_klass);
2119     __ cbnz(rscratch2, L_failed);
2120 
2121     // Check for flat inline type array -> return -1
2122     __ tst(lh, Klass::_lh_array_tag_flat_value_bit_inplace);
2123     __ br(Assembler::NE, L_failed);
2124 
2125     // Check for null-free (non-flat) inline type array -> handle as object array
2126     __ tst(lh, Klass::_lh_null_free_array_bit_inplace);
2127     __ br(Assembler::NE, L_failed);
2128 
2129     //  if (!src->is_Array()) return -1;
2130     __ tbz(lh, 31, L_failed);  // i.e. (lh >= 0)
2131 
2132     // At this point, it is known to be a typeArray (array_tag 0x3).
2133 #ifdef ASSERT
2134     {
2135       BLOCK_COMMENT("assert primitive array {");
2136       Label L;
2137       __ movw(rscratch2, Klass::_lh_array_tag_type_value << Klass::_lh_array_tag_shift);
2138       __ cmpw(lh, rscratch2);
2139       __ br(Assembler::GE, L);
2140       __ stop("must be a primitive array");
2141       __ bind(L);
2142       BLOCK_COMMENT("} assert primitive array done");
2143     }
2144 #endif
2145 
2146     arraycopy_range_checks(src, src_pos, dst, dst_pos, scratch_length,
2147                            rscratch2, L_failed);
2148 

7771     //       MACC(Ra, Ra, t0, t1, t2);
7772     //     }
7773     //     iters =  (2*len-i)/2;
7774     //     assert(iters == len-j, "must be");
7775     //     for (; iters--; j++) {
7776     //       assert(Rm == Pm_base[j] && Rn == Pn_base[i-j], "must be");
7777     //       MACC(Rm, Rn, t0, t1, t2);
7778     //       Rm = *++Pm;
7779     //       Rn = *--Pn;
7780     //     }
7781     //     Pm_base[i-len] = t0;
7782     //     t0 = t1; t1 = t2; t2 = 0;
7783     //   }
7784 
7785     //   while (t0)
7786     //     t0 = sub(Pm_base, Pn_base, t0, len);
7787     // }
7788   };
7789 
7790 
7791   // Call here from the interpreter or compiled code to either load
7792   // multiple returned values from the inline type instance being
7793   // returned to registers or to store returned values to a newly
7794   // allocated inline type instance.
7795   address generate_return_value_stub(address destination, const char* name, bool has_res) {
         // destination: runtime entry to call (e.g. SharedRuntime::load_inline_type_fields_in_regs
         //              or SharedRuntime::store_inline_type_fields_to_buf)
         // name:        stub name, used for the CodeBuffer and the RuntimeStub
         // has_res:     if true, the runtime call leaves a result in the thread's
         //              vm_result, which is fetched into r0 before returning
         // Returns the entry point of the generated RuntimeStub.
7796     // We need to save all registers the calling convention may use so
7797     // the runtime calls read or update those registers. This needs to
7798     // be in sync with SharedRuntime::java_return_convention().
7799     // n.b. aarch64 asserts that frame::arg_reg_save_area_bytes == 0
7800     enum layout {
           // Stack-slot offsets (VMRegImpl slots, 2 slots per 64-bit register)
           // of each saved Java return register, lowest address first. The
           // order must match the push sequence below: j_rargs are pushed
           // last, so j_rarg7 ends up at slot 0.
7801       j_rarg7_off = 0, j_rarg7_2,    // j_rarg7 is r0
7802       j_rarg6_off, j_rarg6_2,
7803       j_rarg5_off, j_rarg5_2,
7804       j_rarg4_off, j_rarg4_2,
7805       j_rarg3_off, j_rarg3_2,
7806       j_rarg2_off, j_rarg2_2,
7807       j_rarg1_off, j_rarg1_2,
7808       j_rarg0_off, j_rarg0_2,
7809
7810       j_farg7_off, j_farg7_2,
7811       j_farg6_off, j_farg6_2,
7812       j_farg5_off, j_farg5_2,
7813       j_farg4_off, j_farg4_2,
7814       j_farg3_off, j_farg3_2,
7815       j_farg2_off, j_farg2_2,
7816       j_farg1_off, j_farg1_2,
7817       j_farg0_off, j_farg0_2,
7818
7819       rfp_off, rfp_off2,
7820       return_off, return_off2,
7821
7822       framesize // inclusive of return address
7823     };
7824
7825     CodeBuffer code(name, 512, 64);
7826     MacroAssembler* masm = new MacroAssembler(&code);
7827
         // framesize is already 16-byte aligned here; the assert enforces that.
7828     int frame_size_in_bytes = align_up(framesize*BytesPerInt, 16);
7829     assert(frame_size_in_bytes == framesize*BytesPerInt, "misaligned");
7830     int frame_size_in_slots = frame_size_in_bytes / BytesPerInt;
7831     int frame_size_in_words = frame_size_in_bytes / wordSize;
7832
         // Record where each Java return register lives in the frame so the
         // GC can find (and update) any oops held in them across the call.
7833     OopMapSet* oop_maps = new OopMapSet();
7834     OopMap* map = new OopMap(frame_size_in_slots, 0);
7835
7836     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg7_off), j_rarg7->as_VMReg());
7837     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg6_off), j_rarg6->as_VMReg());
7838     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg5_off), j_rarg5->as_VMReg());
7839     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg4_off), j_rarg4->as_VMReg());
7840     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg3_off), j_rarg3->as_VMReg());
7841     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg2_off), j_rarg2->as_VMReg());
7842     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg1_off), j_rarg1->as_VMReg());
7843     map->set_callee_saved(VMRegImpl::stack2reg(j_rarg0_off), j_rarg0->as_VMReg());
7844
7845     map->set_callee_saved(VMRegImpl::stack2reg(j_farg0_off), j_farg0->as_VMReg());
7846     map->set_callee_saved(VMRegImpl::stack2reg(j_farg1_off), j_farg1->as_VMReg());
7847     map->set_callee_saved(VMRegImpl::stack2reg(j_farg2_off), j_farg2->as_VMReg());
7848     map->set_callee_saved(VMRegImpl::stack2reg(j_farg3_off), j_farg3->as_VMReg());
7849     map->set_callee_saved(VMRegImpl::stack2reg(j_farg4_off), j_farg4->as_VMReg());
7850     map->set_callee_saved(VMRegImpl::stack2reg(j_farg5_off), j_farg5->as_VMReg());
7851     map->set_callee_saved(VMRegImpl::stack2reg(j_farg6_off), j_farg6->as_VMReg());
7852     map->set_callee_saved(VMRegImpl::stack2reg(j_farg7_off), j_farg7->as_VMReg());
7853
7854     address start = __ pc();
7855
7856     __ enter(); // Save FP and LR before call
7857
         // Push the FP argument registers first (highest addresses), then the
         // integer argument registers, mirroring the layout enum above.
7858     __ stpd(j_farg1, j_farg0, Address(__ pre(sp, -2 * wordSize)));
7859     __ stpd(j_farg3, j_farg2, Address(__ pre(sp, -2 * wordSize)));
7860     __ stpd(j_farg5, j_farg4, Address(__ pre(sp, -2 * wordSize)));
7861     __ stpd(j_farg7, j_farg6, Address(__ pre(sp, -2 * wordSize)));
7862
7863     __ stp(j_rarg1, j_rarg0, Address(__ pre(sp, -2 * wordSize)));
7864     __ stp(j_rarg3, j_rarg2, Address(__ pre(sp, -2 * wordSize)));
7865     __ stp(j_rarg5, j_rarg4, Address(__ pre(sp, -2 * wordSize)));
7866     __ stp(j_rarg7, j_rarg6, Address(__ pre(sp, -2 * wordSize)));
7867
         // All saves are done: from this offset on, the frame is complete.
7868     int frame_complete = __ offset();
7869
7870     // Set up last_Java_sp and last_Java_fp
7871     address the_pc = __ pc();
7872     __ set_last_Java_frame(sp, rfp, the_pc, rscratch1);
7873
7874     // Call runtime
         // c_rarg0 = current thread, c_rarg1 = incoming r0 (the Java value).
7875     __ mov(c_rarg1, r0);
7876     __ mov(c_rarg0, rthread);
7877
7878     __ mov(rscratch1, destination);
7879     __ blr(rscratch1);
7880
         // Register the oop map at the PC recorded in set_last_Java_frame
         // above; that is the PC stack walking uses for this frame.
7881     oop_maps->add_gc_map(the_pc - start, map);
7882
7883     __ reset_last_Java_frame(false);
7884
         // Pop in the reverse order of the saves; the runtime call may have
         // updated any of these registers (e.g. relocated oops).
7885     __ ldp(j_rarg7, j_rarg6, Address(__ post(sp, 2 * wordSize)));
7886     __ ldp(j_rarg5, j_rarg4, Address(__ post(sp, 2 * wordSize)));
7887     __ ldp(j_rarg3, j_rarg2, Address(__ post(sp, 2 * wordSize)));
7888     __ ldp(j_rarg1, j_rarg0, Address(__ post(sp, 2 * wordSize)));
7889
7890     __ ldpd(j_farg7, j_farg6, Address(__ post(sp, 2 * wordSize)));
7891     __ ldpd(j_farg5, j_farg4, Address(__ post(sp, 2 * wordSize)));
7892     __ ldpd(j_farg3, j_farg2, Address(__ post(sp, 2 * wordSize)));
7893     __ ldpd(j_farg1, j_farg0, Address(__ post(sp, 2 * wordSize)));
7894
7895     __ leave();
7896
7897     // check for pending exceptions
7898     Label pending;
7899     __ ldr(rscratch1, Address(rthread, in_bytes(Thread::pending_exception_offset())));
7900     __ cbnz(rscratch1, pending);
7901
7902     if (has_res) {
7903       __ get_vm_result(r0, rthread);
7904     }
7905
7906     __ ret(lr);
7907
         // A pending exception was set by the runtime call: forward it to the
         // caller via the shared forward_exception stub instead of returning.
7908     __ bind(pending);
7909     __ far_jump(RuntimeAddress(StubRoutines::forward_exception_entry()));
7910
7911     // -------------
7912     // make sure all code is generated
7913     masm->flush();
7914
7915     RuntimeStub* stub = RuntimeStub::new_runtime_stub(name, &code, frame_complete, frame_size_in_words, oop_maps, false);
7916     return stub->entry_point();
7917   }
7918 
7919   // Initialization
7920   void generate_initial() {
7921     // Generate initial stubs and initializes the entry points
7922 
7923     // entry points that exist in all platforms Note: This is code
7924     // that could be shared among different platforms - however the
7925     // benefit seems to be smaller than the disadvantage of having a
7926     // much more complicated generator structure. See also comment in
7927     // stubRoutines.hpp.
7928 
7929     StubRoutines::_forward_exception_entry = generate_forward_exception();
7930 
7931     StubRoutines::_call_stub_entry =
7932       generate_call_stub(StubRoutines::_call_stub_return_address);
7933 
7934     // is referenced by megamorphic call
7935     StubRoutines::_catch_exception_entry = generate_catch_exception();
7936 
7937     // Build this early so it's available for the interpreter.
7938     StubRoutines::_throw_StackOverflowError_entry =

7948       StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
7949       StubRoutines::_updateBytesCRC32 = generate_updateBytesCRC32();
7950     }
7951 
7952     if (UseCRC32CIntrinsics) {
7953       StubRoutines::_updateBytesCRC32C = generate_updateBytesCRC32C();
7954     }
7955 
7956     // Disabled until JDK-8210858 is fixed
7957     // if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dlog)) {
7958     //   StubRoutines::_dlog = generate_dlog();
7959     // }
7960 
7961     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dsin)) {
7962       StubRoutines::_dsin = generate_dsin_dcos(/* isCos = */ false);
7963     }
7964 
7965     if (vmIntrinsics::is_intrinsic_available(vmIntrinsics::_dcos)) {
7966       StubRoutines::_dcos = generate_dsin_dcos(/* isCos = */ true);
7967     }
7968 
7969     if (InlineTypeReturnedAsFields) {
7970       StubRoutines::_load_inline_type_fields_in_regs =
7971          generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::load_inline_type_fields_in_regs), "load_inline_type_fields_in_regs", false);
7972       StubRoutines::_store_inline_type_fields_to_buf =
7973          generate_return_value_stub(CAST_FROM_FN_PTR(address, SharedRuntime::store_inline_type_fields_to_buf), "store_inline_type_fields_to_buf", true);
7974     }
7975   }
7976 
7977   void generate_phase1() {
         // Generates the second batch of stubs: the continuation
         // (thaw / return-barrier) entry points and, when JFR is built in,
         // the JFR checkpoint stub.
7978     // Continuation stubs:
7979     StubRoutines::_cont_thaw          = generate_cont_thaw();
7980     StubRoutines::_cont_returnBarrier = generate_cont_returnBarrier();
7981     StubRoutines::_cont_returnBarrierExc = generate_cont_returnBarrier_exception();
7982
         // JFR_ONLY(...) expands to its argument only in builds that include
         // JFR support; otherwise these two statements compile away.
         // The second line caches the stub's entry point for direct calls.
7983     JFR_ONLY(StubRoutines::_jfr_write_checkpoint_stub = generate_jfr_write_checkpoint();)
7984     JFR_ONLY(StubRoutines::_jfr_write_checkpoint = StubRoutines::_jfr_write_checkpoint_stub->entry_point();)
7985   }
7986 
7987   void generate_all() {
7988     // support for verify_oop (must happen after universe_init)
7989     if (VerifyOops) {
7990       StubRoutines::_verify_oop_subroutine_entry   = generate_verify_oop();
7991     }
7992     StubRoutines::_throw_AbstractMethodError_entry =
7993       generate_throw_exception("AbstractMethodError throw_exception",
7994                                CAST_FROM_FN_PTR(address,
< prev index next >