4068 // If TLAB is enabled:
4069 // Try to allocate in the TLAB.
4070 // If fails, go to the slow path.
4071 // Initialize the allocation.
4072 // Exit.
4073 //
4074 // Go to slow path.
4075
// NOTE(review): this excerpt is the interior of the interpreter's 'new'
// fast path. The incoming register contract (presumably rdx = instance
// size in bytes, klass pushed on the stack) is established above this
// chunk -- confirm against the full function.
4076 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4077
4078 if (UseTLAB) {
// 32-bit VMs must materialize the JavaThread* explicitly; on 64-bit it
// lives permanently in r15_thread.
4079 NOT_LP64(__ get_thread(thread);)
// Bump-pointer allocate rdx bytes in the thread's TLAB; the new oop is
// returned in rax, rcx/rbx are used as temps, and on failure control
// transfers to slow_case.
4080 __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4081 if (ZeroTLAB) {
4082 // the fields have been already cleared (TLAB memory is pre-zeroed)
4083 __ jmp(initialize_header);
4084 }
4085
4086 // The object is initialized before the header. If the object size is
4087 // zero, go directly to the header initialization.
// rdx now holds the field-body size: total size minus the fixed header.
4088 __ decrement(rdx, sizeof(oopDesc));
4089 __ jcc(Assembler::zero, initialize_header);
4090
4091 // Initialize topmost object field, divide rdx by 8, check if odd and
4092 // test if zero.
4093 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
// Converts the byte count in rdx into a count of 8-byte units; the bit
// shifted out into CF flags a misaligned (odd-word) size.
4094 __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4095
4096 // rdx must have been multiple of 8
4097 #ifdef ASSERT
4098 // make sure rdx was multiple of 8
4099 Label L;
4100 // Ignore partial flag stall after shrl() since it is debug VM
4101 __ jcc(Assembler::carryClear, L);
4102 __ stop("object size is not multiple of 2 - adjust this code");
4103 __ bind(L);
4104 // rdx must be > 0, no extra check needed here
4105 #endif
4106
4107 // initialize remaining object fields: rdx was a multiple of 8
4108 { Label loop;
4109 __ bind(loop);
// Clear 8 bytes per iteration, walking downward from the top of the
// object: one 8-byte store on 64-bit, two 4-byte stores on 32-bit
// (rcx was zeroed above).
4110 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4111 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4112 __ decrement(rdx);
4113 __ jcc(Assembler::notZero, loop);
4114 }
4115
4116 // initialize object header only.
4117 __ bind(initialize_header);
// Install the VM-wide prototype mark word into the new object's header.
4118 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4119 (intptr_t)markWord::prototype().value()); // header
4120 __ pop(rcx); // get saved klass back in the register.
4121 #ifdef _LP64
4122 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4123 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
4124 #endif
// Store the klass pointer (possibly compressed) into the header;
// rscratch1 is a temp for the encoding.
4125 __ store_klass(rax, rcx, rscratch1); // klass
4126
4127 {
// Emits a runtime test of the DTraceAllocProbes flag; the probe call
// below is skipped when the flag is zero.
4128 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
4129 // Trigger dtrace event for fastpath
// Save/restore the new oop (tos state 'atos') around the leaf call.
4130 __ push(atos);
4131 __ call_VM_leaf(
4132 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4133 __ pop(atos);
4134 }
4135
4136 __ jmp(done);
4137 }
4138
4139 // slow case
4140 __ bind(slow_case);
// Pop the klass that the fast-path setup pushed (presumably -- see the
// matching push above this excerpt) so the stack is balanced for the
// slow call below.
4141 __ pop(rcx); // restore stack pointer to what it was when we came in.
4142 __ bind(slow_case_no_pop);
4143
// Argument registers for the upcoming runtime call: dedicated C calling
// convention registers on 64-bit, rax/rdx on 32-bit.
4144 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4145 Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
|
4068 // If TLAB is enabled:
4069 // Try to allocate in the TLAB.
4070 // If fails, go to the slow path.
4071 // Initialize the allocation.
4072 // Exit.
4073 //
4074 // Go to slow path.
4075
// NOTE(review): this excerpt is the interior of the interpreter's 'new'
// fast path, extended for UseCompactObjectHeaders. The incoming register
// contract (presumably rdx = instance size in bytes, klass pushed on the
// stack) is established above this chunk -- confirm against the full
// function.
4076 const Register thread = LP64_ONLY(r15_thread) NOT_LP64(rcx);
4077
4078 if (UseTLAB) {
// 32-bit VMs must materialize the JavaThread* explicitly; on 64-bit it
// lives permanently in r15_thread.
4079 NOT_LP64(__ get_thread(thread);)
// Bump-pointer allocate rdx bytes in the thread's TLAB; the new oop is
// returned in rax, rcx/rbx are used as temps, and on failure control
// transfers to slow_case.
4080 __ tlab_allocate(thread, rax, rdx, 0, rcx, rbx, slow_case);
4081 if (ZeroTLAB) {
4082 // the fields have been already cleared (TLAB memory is pre-zeroed)
4083 __ jmp(initialize_header);
4084 }
4085
4086 // The object is initialized before the header. If the object size is
4087 // zero, go directly to the header initialization.
// Convert rdx into the field-body size: with compact headers the fields
// start at base_offset_in_bytes() (a smaller header than sizeof(oopDesc));
// otherwise subtract the full legacy header.
4088 if (UseCompactObjectHeaders) {
// The zeroing loop below works in 8-byte units, so the field area must
// begin on an 8-byte boundary.
4089 assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
4090 __ decrement(rdx, oopDesc::base_offset_in_bytes());
4091 } else {
4092 __ decrement(rdx, sizeof(oopDesc));
4093 }
4094 __ jcc(Assembler::zero, initialize_header);
4095
4096 // Initialize topmost object field, divide rdx by 8, check if odd and
4097 // test if zero.
4098 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
// Converts the byte count in rdx into a count of 8-byte units; the bit
// shifted out into CF flags a misaligned (odd-word) size.
4099 __ shrl(rdx, LogBytesPerLong); // divide by 2*oopSize and set carry flag if odd
4100
4101 // rdx must have been multiple of 8
4102 #ifdef ASSERT
4103 // make sure rdx was multiple of 8
4104 Label L;
4105 // Ignore partial flag stall after shrl() since it is debug VM
4106 __ jcc(Assembler::carryClear, L);
4107 __ stop("object size is not multiple of 2 - adjust this code");
4108 __ bind(L);
4109 // rdx must be > 0, no extra check needed here
4110 #endif
4111
4112 // initialize remaining object fields: rdx was a multiple of 8
4113 { Label loop;
4114 __ bind(loop);
// Clear 8 bytes per iteration, walking downward from the top of the
// object (rcx was zeroed above); the base displacement differs only in
// where the header ends.
4115 if (UseCompactObjectHeaders) {
4116 assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
4117 int header_size = oopDesc::base_offset_in_bytes();
4118 __ movptr(Address(rax, rdx, Address::times_8, header_size - 1*oopSize), rcx);
4119 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, header_size - 2*oopSize), rcx));
4120 } else {
4121 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
4122 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
4123 }
4124 __ decrement(rdx);
4125 __ jcc(Assembler::notZero, loop);
4126 }
4127
4128 // initialize object header only.
4129 __ bind(initialize_header);
4130 if (UseCompactObjectHeaders) {
4131 __ pop(rcx); // get saved klass back in the register.
// Load the klass's precomputed prototype header and store it as the
// mark word. With compact headers the prototype presumably already
// encodes the (compressed) klass pointer, which is why no separate
// store_klass / klass-gap clearing is emitted on this path -- confirm
// against the markWord/Klass layout.
4132 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
4133 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
4134 } else {
// Legacy header layout: prototype mark word, then an explicit klass
// store (with the compressed-oops gap zeroed on 64-bit).
4135 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
4136 (intptr_t)markWord::prototype().value()); // header
4137 __ pop(rcx); // get saved klass back in the register.
4138 #ifdef _LP64
4139 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
4140 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
4141 #endif
4142 __ store_klass(rax, rcx, rscratch1); // klass
4143 }
4144
4145 {
// Emits a runtime test of the DTraceAllocProbes flag; the probe call
// below is skipped when the flag is zero.
4146 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0, rscratch1);
4147 // Trigger dtrace event for fastpath
// Save/restore the new oop (tos state 'atos') around the leaf call.
4148 __ push(atos);
4149 __ call_VM_leaf(
4150 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
4151 __ pop(atos);
4152 }
4153
4154 __ jmp(done);
4155 }
4156
4157 // slow case
4158 __ bind(slow_case);
// Pop the klass that the fast-path setup pushed (presumably -- see the
// matching push above this excerpt) so the stack is balanced for the
// slow call below.
4159 __ pop(rcx); // restore stack pointer to what it was when we came in.
4160 __ bind(slow_case_no_pop);
4161
// Argument registers for the upcoming runtime call: dedicated C calling
// convention registers on 64-bit, rax/rdx on 32-bit.
4162 Register rarg1 = LP64_ONLY(c_rarg1) NOT_LP64(rax);
4163 Register rarg2 = LP64_ONLY(c_rarg2) NOT_LP64(rdx);
|