
src/hotspot/cpu/aarch64/templateTable_aarch64.cpp

Old version:

3206 
3207   const Register method = index;  // method must be rmethod
3208   assert(method == rmethod,
3209          "Method must be rmethod for interpreter calling convention");
3210 
3211   // do the call - the index is actually the method to call
3212   // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3213 
3214   // It's final, need a null check here!
3215   __ null_check(recv);
3216 
3217   // profile this call
3218   __ profile_final_call(r0);
3219   __ profile_arguments_type(r0, method, r4, true);
3220 
3221   __ jump_from_interpreted(method, r0);
3222 
3223   __ bind(notFinal);
3224 
3225   // get receiver klass
3226   __ null_check(recv, oopDesc::klass_offset_in_bytes());
3227   __ load_klass(r0, recv);
3228 
3229   // profile this call
3230   __ profile_virtual_call(r0, rlocals, r3);
3231 
3232   // get target Method & entry point
3233   __ lookup_virtual_method(r0, index, method);
3234   __ profile_arguments_type(r3, method, r4, true);
3235   // FIXME -- this looks completely redundant. is it?
3236   // __ ldr(r3, Address(method, Method::interpreter_entry_offset()));
3237   __ jump_from_interpreted(method, r3);
3238 }
3239 
3240 void TemplateTable::invokevirtual(int byte_no)
3241 {
3242   transition(vtos, vtos);
3243   assert(byte_no == f2_byte, "use this argument");
3244 
3245   prepare_invoke(byte_no, rmethod, noreg, r2, r3);
3246 
3247   // rmethod: index (actually a Method*)
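
The split between the vfinal and vtable paths above hinges on how resolution encodes its result in the constant-pool-cache f2 slot. A minimal C++ sketch of the decision the generated code makes (Klass::method_at_vtable is the real accessor; the wrapper function itself is illustrative only):

    // If the site resolved as vfinal, f2 already holds the Method*, so no
    // table lookup is needed; otherwise f2 is a vtable index and the target
    // comes from the receiver klass's vtable, which is what
    // lookup_virtual_method emits code to do.
    static Method* selected_method(Klass* recv_klass, intptr_t f2, bool is_vfinal) {
      if (is_vfinal) {
        return (Method*)f2;                          // final: direct target
      }
      return recv_klass->method_at_vtable((int)f2);  // vtable dispatch
    }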

3296   // r3: flags
3297 
3298   // First check for Object case, then private interface method,
3299   // then regular interface method.
3300 
3301   // Special case of invokeinterface called for virtual method of
3302   // java.lang.Object.  See cpCache.cpp for details.
3303   Label notObjectMethod;
3304   __ tbz(r3, ConstantPoolCacheEntry::is_forced_virtual_shift, notObjectMethod);
3305 
3306   invokevirtual_helper(rmethod, r2, r3);
3307   __ bind(notObjectMethod);
3308 
3309   Label no_such_interface;
3310 
3311   // Check for private method invocation - indicated by vfinal
3312   Label notVFinal;
3313   __ tbz(r3, ConstantPoolCacheEntry::is_vfinal_shift, notVFinal);
3314 
3315   // Get receiver klass into r3 - also a null check
3316   __ null_check(r2, oopDesc::klass_offset_in_bytes());
3317   __ load_klass(r3, r2);
3318 
3319   Label subtype;
3320   __ check_klass_subtype(r3, r0, r4, subtype);
3321   // If we get here the typecheck failed
3322   __ b(no_such_interface);
3323   __ bind(subtype);
3324 
3325   __ profile_final_call(r0);
3326   __ profile_arguments_type(r0, rmethod, r4, true);
3327   __ jump_from_interpreted(rmethod, r0);
3328 
3329   __ bind(notVFinal);
3330 
3331   // Get receiver klass into r3 - also a null check
3332   __ restore_locals();
3333   __ null_check(r2, oopDesc::klass_offset_in_bytes());
3334   __ load_klass(r3, r2);
3335 
3336   Label no_such_method;
3337 
3338   // Preserve method for throw_AbstractMethodErrorVerbose.
3339   __ mov(r16, rmethod);
3340   // Receiver subtype check against REFC.
3341   // Superklass in r0. Subklass in r3. Blows rscratch2, r13
3342   __ lookup_interface_method(// inputs: rec. class, interface, itable index
3343                              r3, r0, noreg,
3344                              // outputs: scan temp. reg, scan temp. reg
3345                              rscratch2, r13,
3346                              no_such_interface,
3347                              /*return_method=*/false);
3348 
3349   // profile this call
3350   __ profile_virtual_call(r3, r13, r19);
3351 
3352   // Get declaring interface class from method, and itable index
3353 
3354   __ load_method_holder(r0, rmethod);
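
Dispatch here is deliberately two lookups: the lookup_interface_method call above (return_method=false) only proves the receiver implements the reference class (REFC) named by the bytecode, and the itable is then scanned a second time for the method's declaring interface, which load_method_holder recovers from the Method* itself. A hedged sketch of that recovery step (method_holder is the real accessor; the helper name is illustrative):

    // What load_method_holder computes: Method* -> ConstMethod* ->
    // ConstantPool* -> pool_holder(), i.e. the class that declared the
    // method -- here, the interface whose itable is scanned next.
    static InstanceKlass* declaring_interface(Method* resolved) {
      return resolved->method_holder();
    }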

3509       __ b(initialize_header);
3510     } else {
3511       // initialize both the header and fields
3512       __ b(initialize_object);
3513     }
3514   } else {
3515     // Allocation in the shared Eden, if allowed.
3516     //
3517     // r3: instance size in bytes
3518     if (allow_shared_alloc) {
3519       __ eden_allocate(r0, r3, 0, r10, slow_case);
3520     }
3521   }
3522 
3523   // If UseTLAB or allow_shared_alloc is true, the object was created above
3524   // and needs to be initialized. Otherwise, skip and go to the slow path.
3525   if (UseTLAB || allow_shared_alloc) {
3526     // The object is initialized before the header.  If the object size is
3527     // zero, go directly to the header initialization.
3528     __ bind(initialize_object);
3529     __ sub(r3, r3, sizeof(oopDesc));
3530     __ cbz(r3, initialize_header);
3531 
3532     // Initialize object fields
3533     {
3534       __ add(r2, r0, sizeof(oopDesc));
3535       Label loop;
3536       __ bind(loop);
3537       __ str(zr, Address(__ post(r2, BytesPerLong)));
3538       __ sub(r3, r3, BytesPerLong);
3539       __ cbnz(r3, loop);
3540     }
3541 
3542     // initialize object header only.
3543     __ bind(initialize_header);
3544     if (UseBiasedLocking) {
3545       __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3546     } else {
3547       __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3548     }
3549     __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3550     __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3551     __ store_klass(r0, r4);      // store klass last
3552 
3553     {
3554       SkipIfEqual skip(_masm, &DTraceAllocProbes, false);
3555       // Trigger dtrace event for fastpath
3556       __ push(atos); // save the return value
3557       __ call_VM_leaf(
3558            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3559       __ pop(atos); // restore the return value
3560 
3561     }
3562     __ b(done);
3563   }
3564 
3565   // slow case
3566   __ bind(slow_case);
3567   __ get_constant_pool(c_rarg1);
3568   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3569   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3570   __ verify_oop(r0);
3571 
3572   // continue
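
Read end to end, the fast path above is: bump-pointer allocate from the TLAB (or shared Eden), zero everything past the header, then write the header last. A compact C++ rendering of the same steps under this version's layout (cast_to_oop, set_mark and set_klass are real HotSpot accessors; the function is a sketch, not the emitted code):

    // Header is sizeof(oopDesc) -- mark word plus klass word -- so the
    // field area is always 8-byte aligned and can be zeroed in longs.
    static void init_instance_legacy(HeapWord* mem, Klass* k, size_t size_bytes) {
      char* p = (char*)mem + sizeof(oopDesc);
      for (size_t n = size_bytes - sizeof(oopDesc); n > 0; n -= BytesPerLong) {
        *(jlong*)p = 0;                        // the str/sub/cbnz loop above
        p += BytesPerLong;
      }
      oop obj = cast_to_oop(mem);
      obj->set_mark(markWord::prototype());    // or the biased prototype header
      obj->set_klass(k);                       // klass stored last; with
                                               // compressed oops the klass gap
                                               // is zeroed as well
    }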

New version:

3206 
3207   const Register method = index;  // method must be rmethod
3208   assert(method == rmethod,
3209          "Method must be rmethod for interpreter calling convention");
3210 
3211   // do the call - the index is actually the method to call
3212   // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3213 
3214   // It's final, need a null check here!
3215   __ null_check(recv);
3216 
3217   // profile this call
3218   __ profile_final_call(r0);
3219   __ profile_arguments_type(r0, method, r4, true);
3220 
3221   __ jump_from_interpreted(method, r0);
3222 
3223   __ bind(notFinal);
3224 
3225   // get receiver klass
3226   __ load_klass(r0, recv, true);
3227 
3228   // profile this call
3229   __ profile_virtual_call(r0, rlocals, r3);
3230 
3231   // get target Method & entry point
3232   __ lookup_virtual_method(r0, index, method);
3233   __ profile_arguments_type(r3, method, r4, true);
3234   // FIXME -- this looks completely redundant. is it?
3235   // __ ldr(r3, Address(method, Method::interpreter_entry_offset()));
3236   __ jump_from_interpreted(method, r3);
3237 }
3238 
3239 void TemplateTable::invokevirtual(int byte_no)
3240 {
3241   transition(vtos, vtos);
3242   assert(byte_no == f2_byte, "use this argument");
3243 
3244   prepare_invoke(byte_no, rmethod, noreg, r2, r3);
3245 
3246   // rmethod: index (actually a Method*)
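
The change in this hunk is that the explicit null_check(recv) is gone and load_klass takes a third argument. The assumption here -- the new signature is not shown in this diff -- is that the flag lets the klass load double as the implicit null check, which works because with compact object headers the klass is decoded from the receiver's own mark word. A hedged sketch of that decode (klass_shift_assumed is a placeholder constant, not the real one from markWord):

    const int klass_shift_assumed = 42;  // illustrative placeholder only

    // Assumed layout: the narrow klass id sits in the upper bits of the
    // mark word, so reading the klass faults on a null receiver exactly
    // like the old explicit null_check did.
    static Klass* load_klass_compact(oopDesc* obj) {
      uintptr_t mark = *(uintptr_t*)((char*)obj + oopDesc::mark_offset_in_bytes());
      narrowKlass nk = (narrowKlass)(mark >> klass_shift_assumed);
      return CompressedKlassPointers::decode(nk);
    }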

3295   // r3: flags
3296 
3297   // First check for Object case, then private interface method,
3298   // then regular interface method.
3299 
3300   // Special case of invokeinterface called for virtual method of
3301   // java.lang.Object.  See cpCache.cpp for details.
3302   Label notObjectMethod;
3303   __ tbz(r3, ConstantPoolCacheEntry::is_forced_virtual_shift, notObjectMethod);
3304 
3305   invokevirtual_helper(rmethod, r2, r3);
3306   __ bind(notObjectMethod);
3307 
3308   Label no_such_interface;
3309 
3310   // Check for private method invocation - indicated by vfinal
3311   Label notVFinal;
3312   __ tbz(r3, ConstantPoolCacheEntry::is_vfinal_shift, notVFinal);
3313 
3314   // Get receiver klass into r3 - also a null check
3315   __ load_klass(r3, r2, true);
3316 
3317   Label subtype;
3318   __ check_klass_subtype(r3, r0, r4, subtype);
3319   // If we get here the typecheck failed
3320   __ b(no_such_interface);
3321   __ bind(subtype);
3322 
3323   __ profile_final_call(r0);
3324   __ profile_arguments_type(r0, rmethod, r4, true);
3325   __ jump_from_interpreted(rmethod, r0);
3326 
3327   __ bind(notVFinal);
3328 
3329   // Get receiver klass into r3 - also a null check
3330   __ restore_locals();
3331   __ load_klass(r3, r2, true);
3332 
3333   Label no_such_method;
3334 
3335   // Preserve method for throw_AbstractMethodErrorVerbose.
3336   __ mov(r16, rmethod);
3337   // Receiver subtype check against REFC.
3338   // Superklass in r0. Subklass in r3. Blows rscratch2, r13
3339   __ lookup_interface_method(// inputs: rec. class, interface, itable index
3340                              r3, r0, noreg,
3341                              // outputs: scan temp. reg, scan temp. reg
3342                              rscratch2, r13,
3343                              no_such_interface,
3344                              /*return_method=*/false);
3345 
3346   // profile this call
3347   __ profile_virtual_call(r3, r13, r19);
3348 
3349   // Get declaring interface class from method, and itable index
3350 
3351   __ load_method_holder(r0, rmethod);
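
One detail worth calling out in the vfinal path above: a private interface method is invoked directly, with no itable lookup, but the receiver must still be checked against the interface, which is what the check_klass_subtype / no_such_interface pair implements. In plain C++ terms (Klass::is_subtype_of is the real predicate; the wrapper is illustrative):

    // Guard for directly-invoked (vfinal) interface methods: the receiver
    // klass in r3 must be a subtype of the interface klass in r0, else the
    // code branches to no_such_interface.
    static bool vfinal_receiver_ok(Klass* recv_klass, Klass* interf) {
      return recv_klass->is_subtype_of(interf);
    }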

3506       __ b(initialize_header);
3507     } else {
3508       // initialize both the header and fields
3509       __ b(initialize_object);
3510     }
3511   } else {
3512     // Allocation in the shared Eden, if allowed.
3513     //
3514     // r3: instance size in bytes
3515     if (allow_shared_alloc) {
3516       __ eden_allocate(r0, r3, 0, r10, slow_case);
3517     }
3518   }
3519 
3520   // If UseTLAB or allow_shared_alloc is true, the object was created above
3521   // and needs to be initialized. Otherwise, skip and go to the slow path.
3522   if (UseTLAB || allow_shared_alloc) {
3523     // The object is initialized before the header.  If the object size is
3524     // zero, go directly to the header initialization.
3525     __ bind(initialize_object);
3526     __ sub(r3, r3, oopDesc::base_offset_in_bytes());
3527     __ cbz(r3, initialize_header);
3528 
3529     // Initialize object fields
3530     {
3531       __ add(r2, r0, oopDesc::base_offset_in_bytes());
3532       if (!is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong)) {
3533         __ strw(zr, Address(__ post(r2, BytesPerInt)));
3534         __ sub(r3, r3, BytesPerInt);
3535         __ cbz(r3, initialize_header);
3536       }
3537       Label loop;
3538       __ bind(loop);
3539       __ str(zr, Address(__ post(r2, BytesPerLong)));
3540       __ sub(r3, r3, BytesPerLong);
3541       __ cbnz(r3, loop);
3542     }
3543 
3544     // initialize object header only.
3545     __ bind(initialize_header);
3546     if (UseBiasedLocking || UseCompactObjectHeaders) {
3547       __ ldr(rscratch1, Address(r4, Klass::prototype_header_offset()));
3548     } else {
3549       __ mov(rscratch1, (intptr_t)markWord::prototype().value());
3550     }
3551     __ str(rscratch1, Address(r0, oopDesc::mark_offset_in_bytes()));
3552     if (!UseCompactObjectHeaders) {
3553       __ store_klass_gap(r0, zr);  // zero klass gap for compressed oops
3554       __ store_klass(r0, r4);      // store klass last
3555     }
3556     {
3557       SkipIfEqual skip(_masm, &DTraceAllocProbes, false);
3558       // Trigger dtrace event for fastpath
3559       __ push(atos); // save the return value
3560       __ call_VM_leaf(
3561            CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), r0);
3562       __ pop(atos); // restore the return value
3563 
3564     }
3565     __ b(done);
3566   }
3567 
3568   // slow case
3569   __ bind(slow_case);
3570   __ get_constant_pool(c_rarg1);
3571   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3572   call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3573   __ verify_oop(r0);
3574 
3575   // continue
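
Taken together, the allocation changes in this version are: the field area now starts at oopDesc::base_offset_in_bytes(), which with compact headers may be only 4-byte aligned, hence the single strw pre-step before the 8-byte zeroing loop; and the prototype header fetched from the Klass already encodes the klass bits, so the separate store_klass / store_klass_gap pair is skipped. A hedged C++ sketch of the same logic (the helper name is illustrative; the constants come from the diff):

    static void init_instance_compact(HeapWord* mem, Klass* k, size_t size_bytes) {
      size_t base = oopDesc::base_offset_in_bytes();   // header size
      char*  p = (char*)mem + base;
      size_t n = size_bytes - base;
      if (!is_aligned(base, BytesPerLong) && n > 0) {
        *(jint*)p = 0;                   // strw pre-step: reach 8-byte alignment
        p += BytesPerInt;
        n -= BytesPerInt;
      }
      for (; n > 0; n -= BytesPerLong, p += BytesPerLong) {
        *(jlong*)p = 0;                  // the existing str/sub/cbnz loop
      }
      // One store initializes the whole header: the prototype mark word
      // taken from the Klass carries the klass bits under compact headers,
      // so no separate set_klass or klass-gap store follows.
      cast_to_oop(mem)->set_mark(k->prototype_header());
    }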