src/hotspot/cpu/aarch64/templateTable_aarch64.cpp

3218 
3219   const Register method = index;  // method must be rmethod
3220   assert(method == rmethod,
3221          "Method must be rmethod for interpreter calling convention");
3222 
3223   // do the call - the index is actually the method to call
3224   // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3225 
3226   // It's final, need a null check here!
3227   __ null_check(recv);
3228 
3229   // profile this call
3230   __ profile_final_call(r0);
3231   __ profile_arguments_type(r0, method, r4, true);
3232 
3233   __ jump_from_interpreted(method, r0);
3234 
3235   __ bind(notFinal);
3236 
3237   // get receiver klass
3238   __ null_check(recv, oopDesc::klass_offset_in_bytes());
3239   __ load_klass(r0, recv);
3240 
3241   // profile this call
3242   __ profile_virtual_call(r0, rlocals, r3);
3243 
3244   // get target Method & entry point
3245   __ lookup_virtual_method(r0, index, method);
3246   __ profile_arguments_type(r3, method, r4, true);
3247   // FIXME -- this looks completely redundant. is it?
3248   // __ ldr(r3, Address(method, Method::interpreter_entry_offset()));
3249   __ jump_from_interpreted(method, r3);
3250 }
3251 
3252 void TemplateTable::invokevirtual(int byte_no)
3253 {
3254   transition(vtos, vtos);
3255   assert(byte_no == f2_byte, "use this argument");
3256 
3257   prepare_invoke(byte_no, rmethod, noreg, r2, r3);
3258 
3259   // rmethod: index (actually a Method*)

3308   // r3: flags
3309 
3310   // First check for Object case, then private interface method,
3311   // then regular interface method.
3312 
3313   // Special case of invokeinterface called for virtual method of
3314   // java.lang.Object.  See cpCache.cpp for details.
3315   Label notObjectMethod;
3316   __ tbz(r3, ConstantPoolCacheEntry::is_forced_virtual_shift, notObjectMethod);
3317 
3318   invokevirtual_helper(rmethod, r2, r3);
3319   __ bind(notObjectMethod);
3320 
3321   Label no_such_interface;
3322 
3323   // Check for private method invocation - indicated by vfinal
3324   Label notVFinal;
3325   __ tbz(r3, ConstantPoolCacheEntry::is_vfinal_shift, notVFinal);
3326 
3327   // Get receiver klass into r3 - also a null check
3328   __ null_check(r2, oopDesc::klass_offset_in_bytes());
3329   __ load_klass(r3, r2);
3330 
3331   Label subtype;
3332   __ check_klass_subtype(r3, r0, r4, subtype);
3333   // If we get here the typecheck failed
3334   __ b(no_such_interface);
3335   __ bind(subtype);
3336 
3337   __ profile_final_call(r0);
3338   __ profile_arguments_type(r0, rmethod, r4, true);
3339   __ jump_from_interpreted(rmethod, r0);
3340 
3341   __ bind(notVFinal);
3342 
3343   // Get receiver klass into r3 - also a null check
3344   __ restore_locals();
3345   __ null_check(r2, oopDesc::klass_offset_in_bytes());
3346   __ load_klass(r3, r2);
3347 
3348   Label no_such_method;
3349 
3350   // Preserve method for throw_AbstractMethodErrorVerbose.
3351   __ mov(r16, rmethod);
3352   // Receiver subtype check against REFC.
3353   // Superklass in r0. Subklass in r3. Blows rscratch2, r13
3354   __ lookup_interface_method(// inputs: rec. class, interface, itable index
3355                              r3, r0, noreg,
3356                              // outputs: scan temp. reg, scan temp. reg
3357                              rscratch2, r13,
3358                              no_such_interface,
3359                              /*return_method=*/false);
3360 
3361   // profile this call
3362   __ profile_virtual_call(r3, r13, r19);
3363 
3364   // Get declaring interface class from method, and itable index
3365 
3366   __ load_method_holder(r0, rmethod);
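
The lookup_interface_method call above deliberately passes return_method=false: it only proves the receiver is compatible with the reference class (REFC). A second scan, elided after this hunk, uses the interface fetched by load_method_holder plus the itable index to find the concrete target. A hedged sketch of that two-pass protocol, with invented helpers (itable_contains, itable_method_at):

    // Illustrative pseudocode only -- these helpers are not HotSpot API.
    Method* resolve_interface_target(Klass* recv_klass, Klass* refc,
                                     Klass* holder, int itable_index) {
      // Pass 1 (return_method=false): does the receiver implement REFC at
      // all?  Failure falls through to no_such_interface -> ICCE.
      if (!itable_contains(recv_klass, refc))
        throw_IncompatibleClassChangeError();
      // Pass 2: find the declaring interface's itable slice and index into
      // it for the concrete Method*.
      Method* m = itable_method_at(recv_klass, holder, itable_index);
      if (m == nullptr)
        throw_AbstractMethodError();      // the no_such_method path
      return m;
    }
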

3521       __ b(initialize_header);
3522     } else {
3523       // initialize both the header and fields
3524       __ b(initialize_object);
3525     }
3526   } else {
3527     // Allocation in the shared Eden, if allowed.
3528     //
3529     // r3: instance size in bytes
3530     if (allow_shared_alloc) {
3531       __ eden_allocate(r0, r3, 0, r10, slow_case);
3532     }
3533   }
3534 
3535   // If UseTLAB or allow_shared_alloc are true, the object is created above and
3536   // needs to be initialized. Otherwise, skip and go to the slow path.
3537   if (UseTLAB || allow_shared_alloc) {
3538     // The object is initialized before the header.  If the object size is
3539     // zero, go directly to the header initialization.
3540     __ bind(initialize_object);
3541     __ sub(r3, r3, sizeof(oopDesc));
3542     __ cbz(r3, initialize_header);
3543 
3544     // Initialize object fields
3545     {
3546       __ add(r2, r0, sizeof(oopDesc));
3547       Label loop;
3548       __ bind(loop);
3549       __ str(zr, Address(__ post(r2, BytesPerLong)));
3550       __ sub(r3, r3, BytesPerLong);
3551       __ cbnz(r3, loop);
3552     }
3553 
3554     // initialize object header only.
3555     __ bind(initialize_header);
3556     if (UseBiasedLocking) {
3557       __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3558     } else {
3559       __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3560     }
3561     __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3562     __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3563     __ store_klass(r0, r4);      // store klass last
3564 
3565     {
3566       SkipIfEqual skip(_masm, &DTraceAllocProbes, false);
3567       // Trigger dtrace event for fastpath
3568       __ push(atos); // save the return value
3569       __ call_VM_leaf(
3570            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3571       __ pop(atos); // restore the return value
3572 
3573     }
3574     __ b(done);
3575   }
3576 
3577   // slow case
3578   __ bind(slow_case);
3579   __ get_constant_pool(c_rarg1);
3580   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3581   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3582   __ verify_oop(r0);
3583 
3584   // continue
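
Taken as a whole, this hunk is the tail of the _new fast path: inline allocation from the TLAB (elided above) or the shared Eden, field zeroing, header initialization, and a runtime call only when inline allocation fails. A compressed pseudocode view, assuming illustrative helper names (tlab_allocate, eden_allocate, slow_path_new, set_mark, set_klass):

    // Hedged pseudocode for the fast path generated above, not real code.
    oop new_instance_fast(Klass* k, size_t size_in_bytes) {
      HeapWord* obj = UseTLAB            ? tlab_allocate(size_in_bytes)
                    : allow_shared_alloc ? eden_allocate(size_in_bytes)
                                         : nullptr;
      if (obj == nullptr)
        return slow_path_new(k);          // InterpreterRuntime::_new
      // Fields first ("the object is initialized before the header"),
      // zeroed one 64-bit word at a time like the str(zr, ...) loop.
      memset((char*)obj + sizeof(oopDesc), 0, size_in_bytes - sizeof(oopDesc));
      set_mark(obj, markWord::prototype()); // or the biased-locking prototype
      set_klass(obj, k);                    // klass written last
      return cast_to_oop(obj);
    }

Storing the klass last matters because a concurrent observer that can already see the object must never find a klass pointer ahead of a zeroed body.
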

3218 
3219   const Register method = index;  // method must be rmethod
3220   assert(method == rmethod,
3221          "Method must be rmethod for interpreter calling convention");
3222 
3223   // do the call - the index is actually the method to call
3224   // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3225 
3226   // It's final, need a null check here!
3227   __ null_check(recv);
3228 
3229   // profile this call
3230   __ profile_final_call(r0);
3231   __ profile_arguments_type(r0, method, r4, true);
3232 
3233   __ jump_from_interpreted(method, r0);
3234 
3235   __ bind(notFinal);
3236 
3237   // get receiver klass
3238   __ load_klass(r0, recv, true);
3239 
3240   // profile this call
3241   __ profile_virtual_call(r0, rlocals, r3);
3242 
3243   // get target Method & entry point
3244   __ lookup_virtual_method(r0, index, method);
3245   __ profile_arguments_type(r3, method, r4, true);
3246   // FIXME -- this looks completely redundant. is it?
3247   // __ ldr(r3, Address(method, Method::interpreter_entry_offset()));
3248   __ jump_from_interpreted(method, r3);
3249 }
3250 
3251 void TemplateTable::invokevirtual(int byte_no)
3252 {
3253   transition(vtos, vtos);
3254   assert(byte_no == f2_byte, "use this argument");
3255 
3256   prepare_invoke(byte_no, rmethod, noreg, r2, r3);
3257 
3258   // rmethod: index (actually a Method*)
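
The visible change in this updated copy of the helper: the explicit null_check(recv, oopDesc::klass_offset_in_bytes()) is gone and load_klass grows a boolean (the same pattern recurs twice in the invokeinterface hunk below). A plausible reading, with the caveat that this signature and parameter name are assumptions rather than the verified MacroAssembler API: with compact object headers the klass is decoded from the mark word rather than loaded from a fixed klass field, so the implicit null check has to sit on whatever load load_klass actually emits.

    // Sketch of the assumed contract; parameter name and helper invented.
    void load_klass(Register dst, Register obj, bool null_check_src) {
      if (null_check_src) {
        // Fault on the word this function will really read: the mark word
        // under UseCompactObjectHeaders, the klass field otherwise.
        int off = UseCompactObjectHeaders ? oopDesc::mark_offset_in_bytes()
                                          : oopDesc::klass_offset_in_bytes();
        null_check(obj, off);
      }
      emit_klass_load(dst, obj);  // stand-in for the unchanged load/decode
    }
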

3307   // r3: flags
3308 
3309   // First check for Object case, then private interface method,
3310   // then regular interface method.
3311 
3312   // Special case of invokeinterface called for virtual method of
3313   // java.lang.Object.  See cpCache.cpp for details.
3314   Label notObjectMethod;
3315   __ tbz(r3, ConstantPoolCacheEntry::is_forced_virtual_shift, notObjectMethod);
3316 
3317   invokevirtual_helper(rmethod, r2, r3);
3318   __ bind(notObjectMethod);
3319 
3320   Label no_such_interface;
3321 
3322   // Check for private method invocation - indicated by vfinal
3323   Label notVFinal;
3324   __ tbz(r3, ConstantPoolCacheEntry::is_vfinal_shift, notVFinal);
3325 
3326   // Get receiver klass into r3 - also a null check
3327   __ load_klass(r3, r2, true);
3328 
3329   Label subtype;
3330   __ check_klass_subtype(r3, r0, r4, subtype);
3331   // If we get here the typecheck failed
3332   __ b(no_such_interface);
3333   __ bind(subtype);
3334 
3335   __ profile_final_call(r0);
3336   __ profile_arguments_type(r0, rmethod, r4, true);
3337   __ jump_from_interpreted(rmethod, r0);
3338 
3339   __ bind(notVFinal);
3340 
3341   // Get receiver klass into r3 - also a null check
3342   __ restore_locals();
3343   __ load_klass(r3, r2, true);
3344 
3345   Label no_such_method;
3346 
3347   // Preserve method for throw_AbstractMethodErrorVerbose.
3348   __ mov(r16, rmethod);
3349   // Receiver subtype check against REFC.
3350   // Superklass in r0. Subklass in r3. Blows rscratch2, r13
3351   __ lookup_interface_method(// inputs: rec. class, interface, itable index
3352                              r3, r0, noreg,
3353                              // outputs: scan temp. reg, scan temp. reg
3354                              rscratch2, r13,
3355                              no_such_interface,
3356                              /*return_method=*/false);
3357 
3358   // profile this call
3359   __ profile_virtual_call(r3, r13, r19);
3360 
3361   // Get declaring interface class from method, and itable index
3362 
3363   __ load_method_holder(r0, rmethod);

3518       __ b(initialize_header);
3519     } else {
3520       // initialize both the header and fields
3521       __ b(initialize_object);
3522     }
3523   } else {
3524     // Allocation in the shared Eden, if allowed.
3525     //
3526     // r3: instance size in bytes
3527     if (allow_shared_alloc) {
3528       __ eden_allocate(r0, r3, 0, r10, slow_case);
3529     }
3530   }
3531 
3532   // If UseTLAB or allow_shared_alloc are true, the object is created above and
3533   // needs to be initialized. Otherwise, skip and go to the slow path.
3534   if (UseTLAB || allow_shared_alloc) {
3535     // The object is initialized before the header.  If the object size is
3536     // zero, go directly to the header initialization.
3537     __ bind(initialize_object);
3538     __ sub(r3, r3, oopDesc::base_offset_in_bytes());
3539     __ cbz(r3, initialize_header);
3540 
3541     // Initialize object fields
3542     {
3543       __ add(r2, r0, oopDesc::base_offset_in_bytes());
3544       if (!is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong)) {
3545         __ strw(zr, Address(__ post(r2, BytesPerInt)));
3546         __ sub(r3, r3, BytesPerInt);
3547         __ cbz(r3, initialize_header);
3548       }
3549       Label loop;
3550       __ bind(loop);
3551       __ str(zr, Address(__ post(r2, BytesPerLong)));
3552       __ sub(r3, r3, BytesPerLong);
3553       __ cbnz(r3, loop);
3554     }
3555 
3556     // initialize object header only.
3557     __ bind(initialize_header);
3558     if (UseBiasedLocking || UseCompactObjectHeaders) {
3559       __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3560     } else {
3561       __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3562     }
3563     __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3564     if (!UseCompactObjectHeaders) {
3565       __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3566       __ store_klass(r0, r4);      // store klass last
3567     }
3568     {
3569       SkipIfEqual skip(_masm, &DTraceAllocProbes, false);
3570       // Trigger dtrace event for fastpath
3571       __ push(atos); // save the return value
3572       __ call_VM_leaf(
3573            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3574       __ pop(atos); // restore the return value
3575 
3576     }
3577     __ b(done);
3578   }
3579 
3580   // slow case
3581   __ bind(slow_case);
3582   __ get_constant_pool(c_rarg1);
3583   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3584   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3585   __ verify_oop(r0);
3586 
3587   // continue
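
Relative to the first copy, this version of the allocation hunk makes three coordinated changes: sizeof(oopDesc) becomes oopDesc::base_offset_in_bytes(), which may be only 4-byte aligned once headers shrink; a single 32-bit strw realigns the zeroing cursor before the 64-bit loop; and under UseCompactObjectHeaders the prototype header loaded from the Klass already encodes the klass, so store_klass_gap/store_klass are skipped. A hedged C++ sketch of the same sequence (helper names invented):

    // Illustrative pseudocode mirroring the updated hunk above.
    void init_instance(HeapWord* raw, Klass* k, size_t size_in_bytes) {
      size_t base = oopDesc::base_offset_in_bytes();
      char*  p    = (char*)raw + base;
      size_t body = size_in_bytes - base;
      if (!is_aligned(base, BytesPerLong) && body > 0) {
        *(uint32_t*)p = 0;                // the strw(zr, ...) realignment
        p += BytesPerInt;
        body -= BytesPerInt;
      }
      memset(p, 0, body);                 // the 64-bit str(zr, ...) loop
      if (UseBiasedLocking || UseCompactObjectHeaders) {
        set_mark(raw, k->prototype_header()); // mark already encodes the
      } else {                                // klass with compact headers
        set_mark(raw, markWord::prototype());
      }
      if (!UseCompactObjectHeaders) {
        set_klass_gap(raw, 0);            // zero gap for compressed oops
        set_klass(raw, k);                // store klass last
      }
    }
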