#include "opto/output.hpp"
#include "opto/opcodes.hpp"
#include "opto/subnode.hpp"
#include "runtime/globals.hpp"
#include "runtime/objectMonitor.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/checkedCast.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/powerOfTwo.hpp"
#include "utilities/sizes.hpp"

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#define STOP(error) stop(error)
#else
#define BLOCK_COMMENT(str) block_comment(str)
#define STOP(error) block_comment(error); stop(error)
#endif

// C2 compiled method's prolog code.
void C2_MacroAssembler::verified_entry(int framesize, int stack_bang_size, bool fp_mode_24b, bool is_stub) {

  // WARNING: Initial instruction MUST be 5 bytes or longer so that
  // NativeJump::patch_verified_entry will be able to patch out the entry
  // code safely. The push to verify stack depth is ok at 5 bytes,
  // the frame allocation can be either 3 or 6 bytes. So if we don't do
  // stack bang then we must use the 6 byte frame allocation even if
  // we have no frame. :-(
  assert(stack_bang_size >= framesize || stack_bang_size <= 0, "stack bang size incorrect");

  assert((framesize & (StackAlignmentInBytes-1)) == 0, "frame size not aligned");
  // Remove word for return addr
  framesize -= wordSize;
  stack_bang_size -= wordSize;

  // Calls to C2R adapters often do not accept exceptional returns.
  // We require their callers to bang the stack for them. But be careful,
  // because some VM calls (such as call site linkage) can use several
  // kilobytes of stack; the stack safety zone should account for that.
  // See bugs 4446381, 4468289, 4497237.
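  // Note: the bang (when stack_bang_size > 0) touches each stack page the new frame will
  // need, so that a potential stack overflow is raised here, while the frame is still in a
  // well-defined state, rather than at an unpredictable point later in the method.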
  if (stack_bang_size > 0) {
    // ...
    // Create frame
    if (framesize) {
      subptr(rsp, framesize);
    }
  } else {
    // Create frame (force generation of a 4 byte immediate value)
    subptr_imm32(rsp, framesize);

    // Save RBP register now.
    framesize -= wordSize;
    movptr(Address(rsp, framesize), rbp);
    // Save caller's stack pointer into RBP if the frame pointer is preserved.
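    // With PreserveFramePointer, rbp is set to point at the saved-rbp slot just below the
    // return address, i.e. the frame gets the same shape that a conventional
    // 'push rbp; mov rbp, rsp' prolog would produce.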
    if (PreserveFramePointer) {
      movptr(rbp, rsp);
      if (framesize > 0) {
        addptr(rbp, framesize);
      }
    }
  }

  if (VerifyStackAtCalls) { // Majik cookie to verify stack depth
    framesize -= wordSize;
    movptr(Address(rsp, framesize), (int32_t)0xbadb100d);
  }

#ifdef ASSERT
  if (VerifyStackAtCalls) {
    Label L;
    push(rax);
    mov(rax, rsp);
    andptr(rax, StackAlignmentInBytes-1);
    cmpptr(rax, StackAlignmentInBytes-wordSize);
    pop(rax);
    jcc(Assembler::equal, L);
    STOP("Stack is not properly aligned!");
    bind(L);
  }
#endif

  if (!is_stub) {
    BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
    // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
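    // The barrier emitted here compares the nmethod's guard value against the current
    // "disarmed" value; if they differ (e.g. because the GC has armed this nmethod), control
    // goes to the out-of-line slow_path stub, otherwise execution falls through at continuation.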
    Label dummy_slow_path;
    Label dummy_continuation;
    Label* slow_path = &dummy_slow_path;
    Label* continuation = &dummy_continuation;
    if (!Compile::current()->output()->in_scratch_emit_size()) {
      // Use the real labels from the actual stub when we are not emitting code just to measure its size.
      C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
      Compile::current()->output()->add_stub(stub);
      slow_path = &stub->entry();
      continuation = &stub->continuation();
    }
    bs->nmethod_entry_barrier(this, slow_path, continuation);
  }
}

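// Map a vector length in bytes to the AVX vector-length encoding. Vectors of 4 or 8 bytes
// are still emitted with the 128-bit encoding, since there is no narrower AVX encoding.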
inline Assembler::AvxVectorLen C2_MacroAssembler::vector_length_encoding(int vlen_in_bytes) {
  switch (vlen_in_bytes) {
    case  4: // fall-through
    case  8: // fall-through
    case 16: return Assembler::AVX_128bit;
    case 32: return Assembler::AVX_256bit;
    case 64: return Assembler::AVX_512bit;

    default: {
      ShouldNotReachHere();
      return Assembler::AVX_NoVec;
    }
  }
}

// fast_lock and fast_unlock used by C2

// Because the transitions from emitted code to the runtime
// ...

  Label IsInflated, DONE_LABEL, NO_COUNT, COUNT;

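  // Value-based classes must not be synchronized on. When DiagnoseSyncOnValueBasedClasses is
  // enabled, detect the attempt here and branch straight to DONE_LABEL with ZF == 0, which
  // selects the slow path so the runtime can diagnose it.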
  if (DiagnoseSyncOnValueBasedClasses != 0) {
    load_klass(tmpReg, objReg, scrReg);
    testb(Address(tmpReg, Klass::misc_flags_offset()), KlassFlags::_misc_is_value_based_class);
    jcc(Assembler::notZero, DONE_LABEL);
  }

  movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // [FETCH]
  testptr(tmpReg, markWord::monitor_value); // inflated vs stack-locked|neutral
  jcc(Assembler::notZero, IsInflated);

  if (LockingMode == LM_MONITOR) {
    // Clear ZF so that we take the slow path at the DONE label. objReg is known to be non-null.
    testptr(objReg, objReg);
  } else {
    assert(LockingMode == LM_LEGACY, "must be");
    // Attempt stack-locking ...
    orptr (tmpReg, markWord::unlocked_value);
    movptr(Address(boxReg, 0), tmpReg);  // Anticipate successful CAS
    lock();
    cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Updates tmpReg
    jcc(Assembler::equal, COUNT);        // Success

    // Recursive locking.
    // The object is stack-locked: the markword contains a stack pointer to a BasicLock.
    // It is locked by the current thread if the difference from the current SP is less than one page.
    subptr(tmpReg, rsp);
    // The next instruction sets ZFlag == 1 (success) if the difference is less than one page.
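    // With a 4K page, (7 - page_size) is 0xFFFFF007: the mask keeps the low three bits and
    // every bit at or above the page size, and clears the in-page offset bits in between.
    // tmpReg (mark - rsp) therefore masks to zero exactly when the displaced mark lies within
    // one page above rsp and is 8-byte aligned, i.e. it points into our own stack, which
    // identifies a recursive stack lock. The zero stored into the box records the recursion.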
    andptr(tmpReg, (int32_t) (7 - (int)os::vm_page_size()) );
    movptr(Address(boxReg, 0), tmpReg);
  }
  jmp(DONE_LABEL);

  bind(IsInflated);
  // The object is inflated. tmpReg contains the ObjectMonitor* tagged with markWord::monitor_value.

  // Unconditionally set box->_displaced_header = markWord::unused_mark().
  // Without the cast to int32_t, this form of movptr would destroy r10, which typically holds obj.
#include "opto/output.hpp"
#include "opto/opcodes.hpp"
#include "opto/subnode.hpp"
#include "runtime/globals.hpp"
#include "runtime/objectMonitor.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/checkedCast.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/powerOfTwo.hpp"
#include "utilities/sizes.hpp"

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#define STOP(error) stop(error)
#else
#define BLOCK_COMMENT(str) block_comment(str)
#define STOP(error) block_comment(error); stop(error)
#endif

// C2 compiled method's prolog code.
void C2_MacroAssembler::verified_entry(Compile* C, int sp_inc) {
  if (C->clinit_barrier_on_entry()) {
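    // Fast class-initialization check: unless the method's holder is already fully initialized
    // (or is being initialized by the current thread), take the handle_wrong_method stub, which
    // re-resolves the call and triggers or waits for class initialization first.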
    assert(VM_Version::supports_fast_class_init_checks(), "sanity");
    assert(!C->method()->holder()->is_not_initialized(), "initialization should have been started");

    Label L_skip_barrier;
    Register klass = rscratch1;

    mov_metadata(klass, C->method()->holder()->constant_encoding());
    clinit_barrier(klass, &L_skip_barrier /*L_fast_path*/);

    jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path

    bind(L_skip_barrier);
  }

  int framesize = C->output()->frame_size_in_bytes();
  int bangsize = C->output()->bang_size_in_bytes();
  bool fp_mode_24b = false;
  int stack_bang_size = C->output()->need_stack_bang(bangsize) ? bangsize : 0;

  // WARNING: Initial instruction MUST be 5 bytes or longer so that
  // NativeJump::patch_verified_entry will be able to patch out the entry
  // code safely. The push to verify stack depth is ok at 5 bytes,
  // the frame allocation can be either 3 or 6 bytes. So if we don't do
  // stack bang then we must use the 6 byte frame allocation even if
  // we have no frame. :-(
  assert(stack_bang_size >= framesize || stack_bang_size <= 0, "stack bang size incorrect");

  assert((framesize & (StackAlignmentInBytes-1)) == 0, "frame size not aligned");
  // Remove word for return addr
  framesize -= wordSize;
  stack_bang_size -= wordSize;

  // Calls to C2R adapters often do not accept exceptional returns.
  // We require their callers to bang the stack for them. But be careful,
  // because some VM calls (such as call site linkage) can use several
  // kilobytes of stack; the stack safety zone should account for that.
  // See bugs 4446381, 4468289, 4497237.
  if (stack_bang_size > 0) {
    // ...
    // Create frame
    if (framesize) {
      subptr(rsp, framesize);
    }
  } else {
    // Create frame (force generation of a 4 byte immediate value)
    subptr_imm32(rsp, framesize);

    // Save RBP register now.
    framesize -= wordSize;
    movptr(Address(rsp, framesize), rbp);
    // Save caller's stack pointer into RBP if the frame pointer is preserved.
    if (PreserveFramePointer) {
      movptr(rbp, rsp);
      if (framesize > 0) {
        addptr(rbp, framesize);
      }
    }
  }

  if (C->needs_stack_repair()) {
    // Save stack increment just below the saved rbp (also account for fixed framesize and rbp)
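    // (The saved value records how much rsp must be adjusted when this frame is removed,
    // covering both the dynamic sp_inc and the fixed frame plus the rbp slot, so the stack
    // can be repaired for calls using the scalarized inline-type calling convention.)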
    assert((sp_inc & (StackAlignmentInBytes-1)) == 0, "stack increment not aligned");
    movptr(Address(rsp, framesize - wordSize), sp_inc + framesize + wordSize);
  }

  if (VerifyStackAtCalls) { // Majik cookie to verify stack depth
    framesize -= wordSize;
    movptr(Address(rsp, framesize), (int32_t)0xbadb100d);
  }

#ifdef ASSERT
  if (VerifyStackAtCalls) {
    Label L;
    push(rax);
    mov(rax, rsp);
    andptr(rax, StackAlignmentInBytes-1);
    cmpptr(rax, StackAlignmentInBytes-wordSize);
    pop(rax);
    jcc(Assembler::equal, L);
    STOP("Stack is not properly aligned!");
    bind(L);
  }
#endif
}

void C2_MacroAssembler::entry_barrier() {
  BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
  // We put the non-hot code of the nmethod entry barrier out-of-line in a stub.
  Label dummy_slow_path;
  Label dummy_continuation;
  Label* slow_path = &dummy_slow_path;
  Label* continuation = &dummy_continuation;
  if (!Compile::current()->output()->in_scratch_emit_size()) {
    // Use the real labels from the actual stub when we are not emitting code just to measure its size.
    C2EntryBarrierStub* stub = new (Compile::current()->comp_arena()) C2EntryBarrierStub();
    Compile::current()->output()->add_stub(stub);
    slow_path = &stub->entry();
    continuation = &stub->continuation();
  }
  bs->nmethod_entry_barrier(this, slow_path, continuation);
}

inline Assembler::AvxVectorLen C2_MacroAssembler::vector_length_encoding(int vlen_in_bytes) {
  switch (vlen_in_bytes) {
    case  4: // fall-through
    case  8: // fall-through
    case 16: return Assembler::AVX_128bit;
    case 32: return Assembler::AVX_256bit;
    case 64: return Assembler::AVX_512bit;

    default: {
      ShouldNotReachHere();
      return Assembler::AVX_NoVec;
    }
  }
}

// fast_lock and fast_unlock used by C2

// Because the transitions from emitted code to the runtime
// ...

  Label IsInflated, DONE_LABEL, NO_COUNT, COUNT;

  if (DiagnoseSyncOnValueBasedClasses != 0) {
    load_klass(tmpReg, objReg, scrReg);
    testb(Address(tmpReg, Klass::misc_flags_offset()), KlassFlags::_misc_is_value_based_class);
    jcc(Assembler::notZero, DONE_LABEL);
  }

  movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // [FETCH]
  testptr(tmpReg, markWord::monitor_value); // inflated vs stack-locked|neutral
  jcc(Assembler::notZero, IsInflated);

  if (LockingMode == LM_MONITOR) {
    // Clear ZF so that we take the slow path at the DONE label. objReg is known to be non-null.
    testptr(objReg, objReg);
  } else {
    assert(LockingMode == LM_LEGACY, "must be");
    // Attempt stack-locking ...
    orptr (tmpReg, markWord::unlocked_value);
    if (EnableValhalla) {
      // Mask the inline_type bit so that we take the slow path if the object is an inline type.
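      // (Clearing the bit in the expected value guarantees that the cmpxchg below can never
      //  succeed for an inline type, whose mark word has the bit set, so locking on an inline
      //  type always falls through to the slow path.)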
      andptr(tmpReg, ~((int) markWord::inline_type_bit_in_place));
    }
    movptr(Address(boxReg, 0), tmpReg);  // Anticipate successful CAS
    lock();
    cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Updates tmpReg
    jcc(Assembler::equal, COUNT);        // Success

    // Recursive locking.
    // The object is stack-locked: the markword contains a stack pointer to a BasicLock.
    // It is locked by the current thread if the difference from the current SP is less than one page.
    subptr(tmpReg, rsp);
    // The next instruction sets ZFlag == 1 (success) if the difference is less than one page.
    andptr(tmpReg, (int32_t) (7 - (int)os::vm_page_size()) );
    movptr(Address(boxReg, 0), tmpReg);
  }
  jmp(DONE_LABEL);

  bind(IsInflated);
  // The object is inflated. tmpReg contains the ObjectMonitor* tagged with markWord::monitor_value.

  // Unconditionally set box->_displaced_header = markWord::unused_mark().
  // Without the cast to int32_t, this form of movptr would destroy r10, which typically holds obj.