  // If TLAB is enabled:
  //    Try to allocate in the TLAB.
  //    If it fails, go to the slow path.
  //    Initialize the allocation.
  //    Exit.
  //
  // Go to slow path.

  const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);

  if (UseTLAB) {
    NOT_LP64(__ get_thread(thread);)
    __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
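    // On success rax points at the newly allocated object and rdx still holds
    // the instance size in bytes; rcx and rbx are passed as scratch registers.
    // On failure tlab_allocate branches to slow_case.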
    if (ZeroTLAB) {
      // the fields have already been cleared
      __ jmp(initialize_header);
    }
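    // Otherwise the TLAB memory is not pre-zeroed, so the instance fields
    // must be cleared explicitly below.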

    // The object is initialized before the header.  If the object size is
    // zero, go directly to the header initialization.
    if (UseCompactObjectHeaders) {
      assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
      __ decrement(rdx, oopDesc::base_offset_in_bytes());
    } else {
      __ decrement(rdx, sizeof(oopDesc));
    }
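    // rdx now holds the number of body bytes to clear (object size minus the
    // header); if it is zero there is nothing to initialize besides the header.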
    __ jcc(Assembler::zero, initialize_header);

    // Initialize topmost object field, divide rdx by 8, check if odd and
    // test if zero.
    __ xorl(rcx, rcx);             // use zero reg to clear memory (shorter code)
    __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd

    // rdx must have been a multiple of 8
#ifdef ASSERT
    // make sure rdx was a multiple of 8
    Label L;
    // Ignore partial flag stall after shrl() since it is debug VM
    __ jcc(Assembler::carryClear, L);
    __ stop("object size is not multiple of 2 - adjust this code");
    __ bind(L);
    // rdx must be > 0, no extra check needed here
#endif

    // initialize remaining object fields: rdx was a multiple of 8
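    // Each loop iteration clears one 8-byte chunk of the instance body,
    // indexed downward by rdx: a single 8-byte store on 64-bit, two oopSize
    // (4-byte) stores on 32-bit.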
    { Label loop;
      __ bind(loop);
      if (UseCompactObjectHeaders) {
        assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
        int header_size = oopDesc::base_offset_in_bytes();
        __ movptr(Address(rax, rdx, Address::times_8, header_size - 1*oopSize), rcx);
        NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, header_size - 2*oopSize), rcx));
      } else {
        __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
        NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
      }
      __ decrement(rdx);
      __ jcc(Assembler::notZero, loop);
    }

    // initialize object header only.
    __ bind(initialize_header);
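    // With compact object headers the prototype mark word loaded from the
    // Klass already encodes the compressed klass pointer, so no separate
    // klass field (or klass gap) needs to be written.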
    if (UseCompactObjectHeaders) {
      __ pop(rcx);   // get saved klass back in the register.
      __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
    } else {
      __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
                (intptr_t)markWord::prototype().value()); // header
      __ pop(rcx);   // get saved klass back in the register.
#ifdef _LP64
      __ xorl(rsi, rsi);            // use zero reg to clear memory (shorter code)
      __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
#endif
      __ store_klass(rax, rcx, rscratch1); // klass
    }

    {
      SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
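      // SkipIfEqual branches over the block when DTraceAllocProbes is zero,
      // so the VM leaf call is only made when allocation probes are enabled.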
      // Trigger dtrace event for fastpath
      __ push(atos);
      __ call_VM_leaf(
           CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
      __ pop(atos);
    }

    __ jmp(done);
  }

  // slow case
  __ bind(slow_case);
  __ pop(rcx);   // restore stack pointer to what it was when we came in.
  __ bind(slow_case_no_pop);

  Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
  Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
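  // rarg1 and rarg2 hold the outgoing arguments for the slow-path runtime
  // allocation call that follows.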