  const Register method = index;  // method must be rbx
  assert(method == rbx,
         "Method* must be rbx for interpreter calling convention");

  // do the call - the index is actually the method to call
  // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
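  // (When is_vfinal is set, resolution has already stored the target Method*
  //  in f2 - a final method cannot be overridden, so no per-receiver dispatch
  //  is needed and the vtable lookup is skipped entirely.)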

  // It's final, need a null check here!
  __ null_check(recv);

  // profile this call
  __ profile_final_call(rax);
  __ profile_arguments_type(rax, method, rbcp, true);

  __ jump_from_interpreted(method, rax);

  __ bind(notFinal);

  // get receiver klass
  __ null_check(recv, oopDesc::klass_offset_in_bytes());
  Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
  __ load_klass(rax, recv, tmp_load_klass);

  // profile this call
  __ profile_virtual_call(rax, rlocals, rdx);
  // get target Method* & entry point
  __ lookup_virtual_method(rax, index, method);
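  // (lookup_virtual_method is a single indexed load, roughly
  //  method = recv_klass->vtable()[index]; there is no search loop, which is
  //  what keeps virtual dispatch cheap compared to the itable scans used by
  //  invokeinterface below.)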

  __ profile_arguments_type(rdx, method, rbcp, true);
  __ jump_from_interpreted(method, rdx);
}

void TemplateTable::invokevirtual(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");
  prepare_invoke(byte_no,
                 rbx,        // method or vtable index
                 noreg,      // unused itable index
                 rcx, rdx);  // recv, flags

  // rbx: index
  // rcx: receiver

// ... [intervening code elided] ...

  // Special case of invokeinterface called for virtual method of
  // java.lang.Object. See cpCache.cpp for details.
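  // (Interfaces inherit the public methods of java.lang.Object, so a call
  //  site such as invokeinterface Comparable.toString resolves to an Object
  //  method; resolution marks the entry is_forced_virtual and it is
  //  dispatched through the vtable via invokevirtual_helper below.)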
  Label notObjectMethod;
  __ movl(rlocals, rdx);
  __ andl(rlocals, (1 << ConstantPoolCacheEntry::is_forced_virtual_shift));
  __ jcc(Assembler::zero, notObjectMethod);
  invokevirtual_helper(rbx, rcx, rdx);
  // no return from above
  __ bind(notObjectMethod);

  Label no_such_interface;  // for receiver subtype check
  Register recvKlass;       // used for exception processing

  // Check for private method invocation - indicated by vfinal
  Label notVFinal;
  __ movl(rlocals, rdx);
  __ andl(rlocals, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
  __ jcc(Assembler::zero, notVFinal);

  // Get receiver klass into rlocals - also a null check
  __ null_check(rcx, oopDesc::klass_offset_in_bytes());
  Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
  __ load_klass(rlocals, rcx, tmp_load_klass);

  Label subtype;
  __ check_klass_subtype(rlocals, rax, rbcp, subtype);
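  // (Private interface methods are not dispatched through the itable - the
  //  resolved Method* is already in rbx - but the receiver must still be a
  //  subtype of the resolved interface (in rax); otherwise the jump below
  //  raises IncompatibleClassChangeError via no_such_interface.)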
  // If we get here the typecheck failed
  recvKlass = rdx;
  __ mov(recvKlass, rlocals);  // shuffle receiver class for exception use
  __ jmp(no_such_interface);

  __ bind(subtype);

  // do the call - rbx is actually the method to call

  __ profile_final_call(rdx);
  __ profile_arguments_type(rdx, rbx, rbcp, true);

  __ jump_from_interpreted(rbx, rdx);
  // no return from above
  __ bind(notVFinal);

  // Get receiver klass into rdx - also a null check
  __ restore_locals();  // restore r14
  __ null_check(rcx, oopDesc::klass_offset_in_bytes());
  __ load_klass(rdx, rcx, tmp_load_klass);

  Label no_such_method;

  // Preserve method for throw_AbstractMethodErrorVerbose.
  __ mov(rcx, rbx);
  // Receiver subtype check against REFC.
  // Superklass in rax. Subklass in rdx. Blows rcx, rdi.
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             rdx, rax, noreg,
                             // outputs: scan temp. reg, scan temp. reg
                             rbcp, rlocals,
                             no_such_interface,
                             /*return_method=*/false);
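  // (First of two itable scans: with return_method=false this only proves
  //  that the receiver class implements the REFC - the interface named at
  //  the call site - branching to no_such_interface otherwise.  A second
  //  scan, keyed by the method's declaring interface and itable index,
  //  fetches the actual Method*; load_method_holder below sets that up.)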

  // profile this call
  __ restore_bcp();  // rbcp was destroyed by receiver type check
  __ profile_virtual_call(rdx, rbcp, rlocals);

  // Get declaring interface class from method, and itable index
  __ load_method_holder(rax, rbx);

// ... [intervening code elided] ...

    if (ZeroTLAB) {
      // the fields have already been cleared
      __ jmp(initialize_header);
    } else {
      // initialize both the header and fields
      __ jmp(initialize_object);
    }
  } else {
    // Allocation in the shared Eden, if allowed.
    //
    // rdx: instance size in bytes
    __ eden_allocate(thread, rax, rdx, 0, rbx, slow_case);
  }
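  // (tlab_allocate bumps the thread-local allocation pointer with plain
  //  stores; eden_allocate must instead CAS the shared Eden top pointer and
  //  retry on contention, falling through to slow_case when space runs out.)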

  // If UseTLAB or allow_shared_alloc are true, the object was created above
  // and still needs to be initialized; otherwise, skip ahead to the slow path.
  if (UseTLAB || allow_shared_alloc) {
    // The object's fields are initialized before its header. If the field
    // size (instance size minus the header) is zero, go directly to the
    // header initialization.
    __ bind(initialize_object);
    __ decrement(rdx, sizeof(oopDesc));
    __ jcc(Assembler::zero, initialize_header);

    // Clear rcx, then divide the remaining size in rdx by 8 (the loop below
    // clears 8 bytes per iteration); the carry flag records whether the size
    // was odd.
    __ xorl(rcx, rcx);   // use zero reg to clear memory (shorter code)
    __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd

#ifdef ASSERT
    // make sure rdx was a multiple of 8
    Label L;
    // Ignore partial flag stall after shrl() since it is debug VM
    __ jcc(Assembler::carryClear, L);
    __ stop("object size is not multiple of 2 - adjust this code");
    __ bind(L);
    // rdx must be > 0, no extra check needed here
#endif
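    // (Worked example on a 64-bit VM: an instance with one long field has
    //  instance size 24 = 16-byte header + 8; rdx = 24 - 16 = 8, and
    //  shifting right by LogBytesPerLong (3) leaves 1, so the loop below
    //  runs once and clears exactly that 8-byte field.)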

    // initialize remaining object fields: rdx was a multiple of 8
    { Label loop;
      __ bind(loop);
      __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
      NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
      __ decrement(rdx);
      __ jcc(Assembler::notZero, loop);
    }
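    // (The loop counts rdx down to 1, storing at
    //  rax + rdx*8 + sizeof(oopDesc) - oopSize, so the last field word is
    //  cleared first and the word just past the header last; NOT_LP64 issues
    //  a second 4-byte store to cover each 8-byte chunk on 32-bit.)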

    // initialize object header only.
    __ bind(initialize_header);
    if (UseBiasedLocking) {
      __ pop(rcx);  // get saved klass back in the register.
      __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
    } else {
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
                (intptr_t)markWord::prototype().value()); // header
      __ pop(rcx);  // get saved klass back in the register.
    }
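    // (markWord::prototype() is the constant "neutral" mark word: unlocked,
    //  no identity hash.  Under biased locking the prototype lives in the
    //  Klass instead, because it carries the klass's current bias epoch.)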
#ifdef _LP64
    __ xorl(rsi, rsi);            // use zero reg to clear memory (shorter code)
    __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
#endif
    Register tmp_store_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
    __ store_klass(rax, rcx, tmp_store_klass);  // klass

    {
      SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

  // slow case
  __ bind(slow_case);
  __ pop(rcx);  // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
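
// ----- End of the baseline excerpt.  Below: the same code after the
// UseCompactObjectHeaders changes - load_klass takes an explicit null-check
// flag, the header size is computed from oopDesc::base_offset_in_bytes(),
// and the separate klass field is written only when compact headers are
// off. -----
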

  const Register method = index;  // method must be rbx
  assert(method == rbx,
         "Method* must be rbx for interpreter calling convention");

  // do the call - the index is actually the method to call
  // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*

  // It's final, need a null check here!
  __ null_check(recv);

  // profile this call
  __ profile_final_call(rax);
  __ profile_arguments_type(rax, method, rbcp, true);

  __ jump_from_interpreted(method, rax);

  __ bind(notFinal);

  // get receiver klass
  Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
  __ load_klass(rax, recv, tmp_load_klass, true);
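  // (The trailing 'true' tells load_klass to perform the null check itself.
  //  The former explicit null_check(recv, oopDesc::klass_offset_in_bytes())
  //  no longer fits: with compact headers the klass is decoded from the mark
  //  word, not loaded from a fixed klass-field offset.)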

  // profile this call
  __ profile_virtual_call(rax, rlocals, rdx);
  // get target Method* & entry point
  __ lookup_virtual_method(rax, index, method);

  __ profile_arguments_type(rdx, method, rbcp, true);
  __ jump_from_interpreted(method, rdx);
}

void TemplateTable::invokevirtual(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");
  prepare_invoke(byte_no,
                 rbx,        // method or vtable index
                 noreg,      // unused itable index
                 rcx, rdx);  // recv, flags

  // rbx: index
  // rcx: receiver

// ... [intervening code elided] ...

  // Special case of invokeinterface called for virtual method of
  // java.lang.Object. See cpCache.cpp for details.
  Label notObjectMethod;
  __ movl(rlocals, rdx);
  __ andl(rlocals, (1 << ConstantPoolCacheEntry::is_forced_virtual_shift));
  __ jcc(Assembler::zero, notObjectMethod);
  invokevirtual_helper(rbx, rcx, rdx);
  // no return from above
  __ bind(notObjectMethod);

  Label no_such_interface;  // for receiver subtype check
  Register recvKlass;       // used for exception processing

  // Check for private method invocation - indicated by vfinal
  Label notVFinal;
  __ movl(rlocals, rdx);
  __ andl(rlocals, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
  __ jcc(Assembler::zero, notVFinal);

  // Get receiver klass into rlocals - also a null check
  Register tmp_load_klass = LP64_ONLY(rscratch1) NOT_LP64(noreg);
  __ load_klass(rlocals, rcx, tmp_load_klass, true);

  Label subtype;
  __ check_klass_subtype(rlocals, rax, rbcp, subtype);
  // If we get here the typecheck failed
  recvKlass = rdx;
  __ mov(recvKlass, rlocals);  // shuffle receiver class for exception use
  __ jmp(no_such_interface);

  __ bind(subtype);

  // do the call - rbx is actually the method to call

  __ profile_final_call(rdx);
  __ profile_arguments_type(rdx, rbx, rbcp, true);

  __ jump_from_interpreted(rbx, rdx);
  // no return from above
  __ bind(notVFinal);

  // Get receiver klass into rdx - also a null check
  __ restore_locals();  // restore r14
  __ load_klass(rdx, rcx, tmp_load_klass, true);

  Label no_such_method;

  // Preserve method for throw_AbstractMethodErrorVerbose.
  __ mov(rcx, rbx);
  // Receiver subtype check against REFC.
  // Superklass in rax. Subklass in rdx. Blows rcx, rdi.
  __ lookup_interface_method(// inputs: rec. class, interface, itable index
                             rdx, rax, noreg,
                             // outputs: scan temp. reg, scan temp. reg
                             rbcp, rlocals,
                             no_such_interface,
                             /*return_method=*/false);

  // profile this call
  __ restore_bcp();  // rbcp was destroyed by receiver type check
  __ profile_virtual_call(rdx, rbcp, rlocals);

  // Get declaring interface class from method, and itable index
  __ load_method_holder(rax, rbx);

// ... [intervening code elided] ...

    if (ZeroTLAB) {
      // the fields have already been cleared
      __ jmp(initialize_header);
    } else {
      // initialize both the header and fields
      __ jmp(initialize_object);
    }
  } else {
    // Allocation in the shared Eden, if allowed.
    //
    // rdx: instance size in bytes
    __ eden_allocate(thread, rax, rdx, 0, rbx, slow_case);
  }

  // If UseTLAB or allow_shared_alloc are true, the object was created above
  // and still needs to be initialized; otherwise, skip ahead to the slow path.
  if (UseTLAB || allow_shared_alloc) {
    // The object's fields are initialized before its header. If the field
    // size (instance size minus the header) is zero, go directly to the
    // header initialization.
    __ bind(initialize_object);
    int header_size = align_up(oopDesc::base_offset_in_bytes(), BytesPerLong);
    __ decrement(rdx, header_size);
    __ jcc(Assembler::zero, initialize_header);
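    // (oopDesc::base_offset_in_bytes() is where instance fields start; with
    //  compact headers it shrinks to the mark word size, since the narrow
    //  klass is folded into the mark word.  Rounding up to BytesPerLong keeps
    //  the 8-bytes-at-a-time clearing below correct, and with compact headers
    //  off it reproduces the old sizeof(oopDesc) constant.)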

    // Clear rcx, then divide the remaining size in rdx by 8 (the loop below
    // clears 8 bytes per iteration); the carry flag records whether the size
    // was odd.
    __ xorl(rcx, rcx);   // use zero reg to clear memory (shorter code)
    __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd

#ifdef ASSERT
    // make sure rdx was a multiple of 8
    Label L;
    // Ignore partial flag stall after shrl() since it is debug VM
    __ jcc(Assembler::carryClear, L);
    __ stop("object size is not multiple of 2 - adjust this code");
    __ bind(L);
    // rdx must be > 0, no extra check needed here
#endif

    // initialize remaining object fields: rdx was a multiple of 8
    { Label loop;
      __ bind(loop);
      __ movptr(Address(rax, rdx, Address::times_8, header_size - 1*oopSize), rcx);
      NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, header_size - 2*oopSize), rcx));
      __ decrement(rdx);
      __ jcc(Assembler::notZero, loop);
    }

    // initialize object header only.
    __ bind(initialize_header);
    if (UseBiasedLocking || UseCompactObjectHeaders) {
      __ pop(rcx);  // get saved klass back in the register.
      __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
    } else {
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
                (intptr_t)markWord::prototype().value()); // header
      __ pop(rcx);  // get saved klass back in the register.
    }
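    // (With compact headers the mark word is no longer a compile-time
    //  constant - its upper bits hold the narrow klass pointer - so the
    //  prototype header must be loaded from the Klass, reusing the path that
    //  biased locking already needs.)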
    if (!UseCompactObjectHeaders) {
#ifdef _LP64
      __ xorl(rsi, rsi);            // use zero reg to clear memory (shorter code)
      __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
#endif
      __ store_klass(rax, rcx, rscratch1);  // klass
    }
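    // (No separate klass field exists under compact headers: storing the
    //  prototype mark word above already published the klass, so store_klass
    //  and the klass-gap clearing are skipped.)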

    {
      SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

  // slow case
  __ bind(slow_case);
  __ pop(rcx);  // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);