< prev index next > src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp
Print this page
#define BLOCK_COMMENT(str) block_comment(str)
#define STOP(error) block_comment(error); stop(error)
#endif
// C2 compiled method's prolog code.
// C2 prolog emitter. The signature change replaces the individually-passed
// sizes with the Compile* (sizes are now pulled from C->output()) and adds
// sp_inc for Valhalla's stack-repair bookkeeping.
// NOTE(review): this is a diff hunk; unchanged interior lines of the frame
// setup are elided from this view — comments below only cover what is visible.
- void C2_MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b, bool is_stub) {
+ void C2_MacroAssembler::verified_entry(Compile* C, int sp_inc) {
// Fast class-initialization check: if the holder class is still being
// initialized by another thread, bail out to the runtime instead of
// entering the compiled code.
+ if (C->clinit_barrier_on_entry()) {
+ assert(VM_Version::supports_fast_class_init_checks(), "sanity");
+ assert(!C->method()->holder()->is_not_initialized(), "initialization should have been started");
+
+ Label L_skip_barrier;
+ Register klass = rscratch1;
+
// Load the holder's Klass* and test its init state; L_skip_barrier is the
// fast path taken when the class is fully initialized (or being initialized
// by the current thread — presumably; clinit_barrier's exact test is not
// visible here, confirm in macroAssembler).
+ mov_metadata(klass, C->method()->holder()->constant_encoding());
+ clinit_barrier(klass, r15_thread, &L_skip_barrier /*L_fast_path*/);
+
+ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path
+
+ bind(L_skip_barrier);
+ }
+
// Recompute the values the old parameters used to carry, from C->output().
// fp_mode_24b is now hard-wired off; stack_bang_size is zero when no bang
// is needed for this frame size.
+ int framesize = C->output()->frame_size_in_bytes();
+ int bangsize = C->output()->bang_size_in_bytes();
+ bool fp_mode_24b = false;
+ int stack_bang_size = C->output()->need_stack_bang(bangsize) ? bangsize : 0;
// WARNING: Initial instruction MUST be 5 bytes or longer so that
// NativeJump::patch_verified_entry will be able to patch out the entry
// code safely. The push to verify stack depth is ok at 5 bytes,
// the frame allocation can be either 3 or 6 bytes. So if we don't do
// NOTE(review): the stack-bang / frame-allocation code between here and the
// addptr below is elided from this diff view.
addptr(rbp, framesize);
}
}
}
// Valhalla stack repair: record how much this frame grew the stack so the
// deoptimization/stack-walking code can undo it. The value is stored in the
// slot just below the saved rbp.
+ if (C->needs_stack_repair()) {
+ // Save stack increment just below the saved rbp (also account for fixed framesize and rbp)
+ assert((sp_inc & (StackAlignmentInBytes-1)) == 0, "stack increment not aligned");
+ movptr(Address(rsp, framesize - wordSize), sp_inc + framesize + wordSize);
+ }
+
// Debug-only: plant a magic cookie at the top of the frame so callees can
// verify the caller's stack depth.
if (VerifyStackAtCalls) { // Majik cookie to verify stack depth
framesize -= wordSize;
movptr(Address(rsp, framesize), (int32_t)0xbadb100d);
}
// NOTE(review): the comparison feeding this jcc (an rsp alignment check,
// per the STOP message) and the declaration of Label L are elided from
// this diff view.
jcc(Assembler::equal, L);
STOP("Stack is not properly aligned!");
bind(L);
}
#endif
+ }
// The nmethod entry-barrier emission is hoisted out of verified_entry's
// "!is_stub" tail (removed lines below) into a dedicated entry_barrier()
// method, so callers decide explicitly whether to emit it.
- if (!is_stub) {
- BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
- #ifdef _LP64
- if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) {
- // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
- Label dummy_slow_path;
- Label dummy_continuation;
- Label* slow_path = &dummy_slow_path;
- Label* continuation = &dummy_continuation;
- if (!Compile::current()->output()->in_scratch_emit_size()) {
- // Use real labels from actual stub when not emitting code for the purpose of measuring its size
- C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
- Compile::current()->output()->add_stub(stub);
- slow_path = &stub->entry();
- continuation = &stub->continuation();
- }
- bs->nmethod_entry_barrier(this, slow_path, continuation);
// Emits the nmethod entry barrier (GC armed-nmethod check) for a C2 method.
// On 64-bit, the cold path lives out-of-line in a C2EntryBarrierStub; during
// scratch-buffer size measurement only dummy labels are used, since the real
// stub is created once, on the actual emission pass.
+ void C2_MacroAssembler::entry_barrier() {
+ BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
+ #ifdef _LP64
// Only emit when the current BarrierSet actually provides an nmethod
// barrier (e.g. concurrent-GC configurations); otherwise emit nothing.
+ if (BarrierSet::barrier_set()->barrier_set_nmethod() != nullptr) {
+ // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
+ Label dummy_slow_path;
+ Label dummy_continuation;
+ Label* slow_path = &dummy_slow_path;
+ Label* continuation = &dummy_continuation;
+ if (!Compile::current()->output()->in_scratch_emit_size()) {
+ // Use real labels from actual stub when not emitting code for the purpose of measuring its size
+ C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
+ Compile::current()->output()->add_stub(stub);
+ slow_path = &stub->entry();
+ continuation = &stub->continuation();
}
+ bs->nmethod_entry_barrier(this, slow_path, continuation);
+ }
#else
- // Don't bother with out-of-line nmethod entry barrier stub for x86_32.
- bs->nmethod_entry_barrier(this, nullptr /* slow_path */, nullptr /* continuation */);
// x86_32: emit the barrier fully inline (null labels mean "no stub").
+ // Don't bother with out-of-line nmethod entry barrier stub for x86_32.
+ bs->nmethod_entry_barrier(this, nullptr /* slow_path */, nullptr /* continuation */);
#endif
- }
}
// Maps a vector length in bytes (4/8/16/32/64 — presumably; only the first
// case label is visible, the rest of this switch is elided from this diff
// view) to the Assembler's AVX vector-length encoding.
inline Assembler::AvxVectorLen C2_MacroAssembler::vector_length_encoding(int vlen_in_bytes) {
switch (vlen_in_bytes) {
case 4: // fall-through
// NOTE(review): fragment of the monitor fast-lock path (function head and
// tail are elided from this diff view). This branch performs legacy
// stack-locking: build the expected unlocked mark in tmpReg, store it in the
// on-stack BasicLock (boxReg), then CAS the box address into the object's
// mark word.
testptr(objReg, objReg);
} else {
assert(LockingMode == LM_LEGACY, "must be");
// Attempt stack-locking ...
orptr (tmpReg, markWord::unlocked_value);
// Valhalla: clearing the inline_type bit in the expected value makes the
// CAS below fail for inline-type objects (their mark keeps the bit set),
// diverting them to the slow path — inline types must not be synchronized on.
+ if (EnableValhalla) {
+ // Mask inline_type bit such that we go to the slow path if object is an inline type
+ andptr(tmpReg, ~((int) markWord::inline_type_bit_in_place));
+ }
movptr(Address(boxReg, 0), tmpReg); // Anticipate successful CAS
lock();
cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Updates tmpReg
jcc(Assembler::equal, COUNT); // Success
< prev index next >