1 /*
2 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
5234
void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
  // Load the java.lang.Class mirror of 'method's holder class into 'mirror'.
  // 'tmp' is a temporary passed through to resolve_oop_handle.
  const int mirror_offset = in_bytes(Klass::java_mirror_offset());
  load_method_holder(mirror, method);              // mirror := holder InstanceKlass*
  movptr(mirror, Address(mirror, mirror_offset));  // mirror := Klass::_java_mirror (an OopHandle)
  resolve_oop_handle(mirror, tmp);                 // dereference the handle to the mirror oop
}
5242
void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
  // Load the ClassLoaderData of 'rmethod's holder class into 'rresult'.
  load_method_holder(rresult, rmethod);  // rresult := holder InstanceKlass*
  movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
}
5247
// Load the InstanceKlass* that declares 'method' into 'holder' by chasing
// Method* -> ConstMethod* -> ConstantPool* -> pool holder.
void MacroAssembler::load_method_holder(Register holder, Register method) {
  movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
  movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
  movptr(holder, Address(holder, ConstantPool::pool_holder_offset()));          // InstanceKlass*
}
5253
// Load the Klass* of the object in 'src' into 'dst'. With compressed class
// pointers (64-bit only) the 32-bit narrow klass is loaded and then decoded;
// 'tmp' may be used by decode_klass_not_null.
void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
  assert_different_registers(src, tmp);
  assert_different_registers(dst, tmp);
#ifdef _LP64
  if (UseCompressedClassPointers) {
    movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));  // narrow klass (32 bits)
    decode_klass_not_null(dst, tmp);
  } else
#endif
    movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
}
5265
// Store the Klass* in 'src' into the klass field of the object 'dst'.
// With compressed class pointers the klass is encoded first.
void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
  assert_different_registers(src, tmp);
  assert_different_registers(dst, tmp);
#ifdef _LP64
  if (UseCompressedClassPointers) {
    encode_klass_not_null(src, tmp);  // NOTE: clobbers 'src' with the narrow klass
    movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
  } else
#endif
    movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
}
5277
5278 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
5279 Register tmp1, Register thread_tmp) {
5280 BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5281 decorators = AccessInternal::decorator_fixup(decorators, type);
5282 bool as_raw = (decorators & AS_RAW) != 0;
5283 if (as_raw) {
5284 bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5285 } else {
5286 bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5287 }
5288 }
5289
5290 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register val,
5291 Register tmp1, Register tmp2, Register tmp3) {
5292 BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5293 decorators = AccessInternal::decorator_fixup(decorators, type);
5294 bool as_raw = (decorators & AS_RAW) != 0;
5295 if (as_raw) {
5296 bs->BarrierSetAssembler::store_at(this, decorators, type, dst, val, tmp1, tmp2, tmp3);
5297 } else {
5305 }
5306
// Doesn't do verification, generates fixed size code
void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
                                            Register thread_tmp, DecoratorSet decorators) {
  // IS_NOT_NULL asserts to the barrier machinery that the loaded oop cannot
  // be null, so null paths can be omitted.
  access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
}
5312
// Store oop 'val' to heap location 'dst' through the GC barrier machinery.
// tmp1..tmp3 are temporaries for the barrier-set assembler.
void MacroAssembler::store_heap_oop(Address dst, Register val, Register tmp1,
                                    Register tmp2, Register tmp3, DecoratorSet decorators) {
  access_store_at(T_OBJECT, IN_HEAP | decorators, dst, val, tmp1, tmp2, tmp3);
}
5317
// Used for storing nulls.
void MacroAssembler::store_heap_oop_null(Address dst) {
  // noreg as 'val' signals the barrier assembler to store a null oop.
  access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
}
5322
5323 #ifdef _LP64
// Fill the 32-bit klass gap of object 'dst' from 'src'. With compressed class
// pointers the narrow klass only occupies half of the klass word, leaving a
// gap that must be initialized; no-op otherwise.
void MacroAssembler::store_klass_gap(Register dst, Register src) {
  if (UseCompressedClassPointers) {
    // Store to klass gap in destination
    movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
  }
}
5330
5331 #ifdef ASSERT
5332 void MacroAssembler::verify_heapbase(const char* msg) {
5333 assert (UseCompressedOops, "should be compressed");
5334 assert (Universe::heap() != nullptr, "java heap should be initialized");
5335 if (CheckCompressedOops) {
5336 Label ok;
5337 ExternalAddress src2(CompressedOops::ptrs_base_addr());
5338 const bool is_src2_reachable = reachable(src2);
5339 if (!is_src2_reachable) {
5340 push(rscratch1); // cmpptr trashes rscratch1
5341 }
5342 cmpptr(r12_heapbase, src2, rscratch1);
5343 jcc(Assembler::equal, ok);
5344 STOP(msg);
9798
9799 #endif // !WIN32 || _LP64
9800
// Debug helper: halt with 'msg' unless (sp + bias) is aligned to 2*wordSize.
// 'tmp' is clobbered only when bias != 0.
void MacroAssembler::check_stack_alignment(Register sp, const char* msg, unsigned bias, Register tmp) {
  Label L_stack_ok;
  if (bias == 0) {
    testptr(sp, 2 * wordSize - 1);  // test low alignment bits
  } else {
    // lea(tmp, Address(rsp, bias));
    mov(tmp, sp);
    addptr(tmp, bias);
    testptr(tmp, 2 * wordSize - 1);
  }
  jcc(Assembler::equal, L_stack_ok);
  block_comment(msg);
  stop(msg);
  bind(L_stack_ok);
}
9816
// Implements lightweight-locking.
// Branches to slow upon failure to lock the object, with ZF cleared.
// Falls through upon success with unspecified ZF.
//
// obj: the object to be locked
// hdr: the (pre-loaded) header of the object, must be rax
// thread: the thread which attempts to lock obj
// tmp: a temporary register
void MacroAssembler::lightweight_lock(Register obj, Register hdr, Register thread, Register tmp, Label& slow) {
  assert(hdr == rax, "header must be in rax for cmpxchg");
  assert_different_registers(obj, hdr, thread, tmp);

  // First we need to check if the lock-stack has room for pushing the object reference.
  // Note: we subtract 1 from the end-offset so that we can do a 'greater' comparison, instead
  // of 'greaterEqual' below, which readily clears the ZF. This makes C2 code a little simpler and
  // avoids one branch.
  cmpl(Address(thread, JavaThread::lock_stack_top_offset()), LockStack::end_offset() - 1);
  jcc(Assembler::greater, slow);

  // Now we attempt to take the fast-lock.
  // Clear lock_mask bits (locked state).
  andptr(hdr, ~(int32_t)markWord::lock_mask_in_place);
  movptr(tmp, hdr);               // tmp := new (locked) mark word
  // Set unlocked_value bit.
  orptr(hdr, markWord::unlocked_value);  // hdr/rax := expected (unlocked) mark word
  lock();
  cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
  jcc(Assembler::notEqual, slow); // CAS failed: someone else owns or changed the mark
  // If successful, push object to lock-stack.
  movl(tmp, Address(thread, JavaThread::lock_stack_top_offset()));
  movptr(Address(thread, tmp), obj);
  incrementl(tmp, oopSize);
  movl(Address(thread, JavaThread::lock_stack_top_offset()), tmp);
}
9852
// Implements lightweight-unlocking.
// Branches to slow upon failure, with ZF cleared.
// Falls through upon success, with unspecified ZF.
//
// obj: the object to be unlocked
// hdr: the (pre-loaded) header of the object, must be rax
// tmp: a temporary register
void MacroAssembler::lightweight_unlock(Register obj, Register hdr, Register tmp, Label& slow) {
  assert(hdr == rax, "header must be in rax for cmpxchg");
  assert_different_registers(obj, hdr, tmp);

  // Mark-word must be lock_mask now, try to swing it back to unlocked_value.
  movptr(tmp, hdr); // hdr/rax stays the expected (locked) value; tmp becomes the new value
  orptr(tmp, markWord::unlocked_value);
  lock();
  cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
  jcc(Assembler::notEqual, slow);
  // Pop the lock object from the lock-stack.
#ifdef _LP64
  const Register thread = r15_thread;
#else
  // 32-bit: rax is free after the CAS succeeded, reuse it for the thread.
  const Register thread = rax;
  get_thread(thread);
#endif
  subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
#ifdef ASSERT
  // Poison the popped slot so stale entries are caught in debug builds.
  movl(tmp, Address(thread, JavaThread::lock_stack_top_offset()));
  movptr(Address(thread, tmp), 0);
#endif
}
|
1 /*
2 * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
5234
void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
  // Load the java.lang.Class mirror of 'method's holder class into 'mirror'.
  // 'tmp' is a temporary passed through to resolve_oop_handle.
  const int mirror_offset = in_bytes(Klass::java_mirror_offset());
  load_method_holder(mirror, method);              // mirror := holder InstanceKlass*
  movptr(mirror, Address(mirror, mirror_offset));  // mirror := Klass::_java_mirror (an OopHandle)
  resolve_oop_handle(mirror, tmp);                 // dereference the handle to the mirror oop
}
5242
void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
  // Load the ClassLoaderData of 'rmethod's holder class into 'rresult'.
  load_method_holder(rresult, rmethod);  // rresult := holder InstanceKlass*
  movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
}
5247
// Load the InstanceKlass* that declares 'method' into 'holder' by chasing
// Method* -> ConstMethod* -> ConstantPool* -> pool holder.
void MacroAssembler::load_method_holder(Register holder, Register method) {
  movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
  movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
  movptr(holder, Address(holder, ConstantPool::pool_holder_offset()));          // InstanceKlass*
}
5253
#ifdef _LP64
// Load the narrow Klass* of 'src' into 'dst' when compact object headers are
// in use: the narrow klass lives in the upper bits of the mark word. If the
// object is monitor-locked (0b10), the real header has been displaced into
// the ObjectMonitor, so fetch it from there first.
void MacroAssembler::load_nklass_compact(Register dst, Register src) {
  assert(UseCompactObjectHeaders, "expect compact object headers");

  Label fast;
  movq(dst, Address(src, oopDesc::mark_offset_in_bytes()));
  testb(dst, markWord::monitor_value);  // monitor bit set => header displaced
  jccb(Assembler::zero, fast);

  // Fetch displaced header
  movq(dst, Address(dst, OM_OFFSET_NO_MONITOR_VALUE_TAG(header)));

  bind(fast);
  shrq(dst, markWord::klass_shift);  // shift the narrow klass down to bit 0
}
#endif
5270
// Load the Klass* of the object in 'src' into 'dst'. On 64-bit, compact
// headers read the narrow klass from the mark word, compressed class pointers
// read it from the klass field; both then decode. 'tmp' may be used by
// decode_klass_not_null.
void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
  assert_different_registers(src, tmp);
  assert_different_registers(dst, tmp);
#ifdef _LP64
  if (UseCompactObjectHeaders) {
    load_nklass_compact(dst, src);
    decode_klass_not_null(dst, tmp);
  } else if (UseCompressedClassPointers) {
    movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));  // narrow klass (32 bits)
    decode_klass_not_null(dst, tmp);
  } else
#endif
  {
    movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
  }
}
5287
// Store the Klass* in 'src' into the klass field of the object 'dst'.
// Never used with compact object headers (there the klass lives in the mark
// word, see load_nklass_compact).
void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
  assert(!UseCompactObjectHeaders, "not with compact headers");
  assert_different_registers(src, tmp);
  assert_different_registers(dst, tmp);
#ifdef _LP64
  if (UseCompressedClassPointers) {
    encode_klass_not_null(src, tmp);  // NOTE: clobbers 'src' with the narrow klass
    movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
  } else
#endif
    movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
}
5300
// Compare 'klass' against the klass of object 'obj'; sets flags for a
// following jcc. 'tmp' is clobbered only with compact object headers.
void MacroAssembler::cmp_klass(Register klass, Register obj, Register tmp) {
#ifdef _LP64
  if (UseCompactObjectHeaders) {
    load_nklass_compact(tmp, obj);
    cmpl(klass, tmp);  // narrow-klass compare
  } else if (UseCompressedClassPointers) {
    cmpl(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
  } else
#endif
  {
    cmpptr(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
  }
}
5314
// Compare the klasses of objects 'src' and 'dst'; sets flags for a following
// jcc. 'tmp1' is always clobbered; 'tmp2' only with compact object headers.
void MacroAssembler::cmp_klass(Register src, Register dst, Register tmp1, Register tmp2) {
#ifdef _LP64
  if (UseCompactObjectHeaders) {
    assert(tmp2 != noreg, "need tmp2");
    assert_different_registers(src, dst, tmp1, tmp2);
    load_nklass_compact(tmp1, src);
    load_nklass_compact(tmp2, dst);
    cmpl(tmp1, tmp2);
  } else if (UseCompressedClassPointers) {
    movl(tmp1, Address(src, oopDesc::klass_offset_in_bytes()));
    cmpl(tmp1, Address(dst, oopDesc::klass_offset_in_bytes()));
  } else
#endif
  {
    movptr(tmp1, Address(src, oopDesc::klass_offset_in_bytes()));
    cmpptr(tmp1, Address(dst, oopDesc::klass_offset_in_bytes()));
  }
}
5333
5334 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
5335 Register tmp1, Register thread_tmp) {
5336 BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5337 decorators = AccessInternal::decorator_fixup(decorators, type);
5338 bool as_raw = (decorators & AS_RAW) != 0;
5339 if (as_raw) {
5340 bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5341 } else {
5342 bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5343 }
5344 }
5345
5346 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register val,
5347 Register tmp1, Register tmp2, Register tmp3) {
5348 BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5349 decorators = AccessInternal::decorator_fixup(decorators, type);
5350 bool as_raw = (decorators & AS_RAW) != 0;
5351 if (as_raw) {
5352 bs->BarrierSetAssembler::store_at(this, decorators, type, dst, val, tmp1, tmp2, tmp3);
5353 } else {
5361 }
5362
// Doesn't do verification, generates fixed size code
void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
                                            Register thread_tmp, DecoratorSet decorators) {
  // IS_NOT_NULL asserts to the barrier machinery that the loaded oop cannot
  // be null, so null paths can be omitted.
  access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
}
5368
// Store oop 'val' to heap location 'dst' through the GC barrier machinery.
// tmp1..tmp3 are temporaries for the barrier-set assembler.
void MacroAssembler::store_heap_oop(Address dst, Register val, Register tmp1,
                                    Register tmp2, Register tmp3, DecoratorSet decorators) {
  access_store_at(T_OBJECT, IN_HEAP | decorators, dst, val, tmp1, tmp2, tmp3);
}
5373
// Used for storing nulls.
void MacroAssembler::store_heap_oop_null(Address dst) {
  // noreg as 'val' signals the barrier assembler to store a null oop.
  access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
}
5378
5379 #ifdef _LP64
// Fill the 32-bit klass gap of object 'dst' from 'src'. With compressed class
// pointers the narrow klass only occupies half of the klass word, leaving a
// gap that must be initialized; no-op otherwise. Never used with compact
// headers (no separate klass word there).
void MacroAssembler::store_klass_gap(Register dst, Register src) {
  assert(!UseCompactObjectHeaders, "Don't use with compact headers");
  if (UseCompressedClassPointers) {
    // Store to klass gap in destination
    movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
  }
}
5387
5388 #ifdef ASSERT
5389 void MacroAssembler::verify_heapbase(const char* msg) {
5390 assert (UseCompressedOops, "should be compressed");
5391 assert (Universe::heap() != nullptr, "java heap should be initialized");
5392 if (CheckCompressedOops) {
5393 Label ok;
5394 ExternalAddress src2(CompressedOops::ptrs_base_addr());
5395 const bool is_src2_reachable = reachable(src2);
5396 if (!is_src2_reachable) {
5397 push(rscratch1); // cmpptr trashes rscratch1
5398 }
5399 cmpptr(r12_heapbase, src2, rscratch1);
5400 jcc(Assembler::equal, ok);
5401 STOP(msg);
9855
9856 #endif // !WIN32 || _LP64
9857
9858 void MacroAssembler::check_stack_alignment(Register sp, const char* msg, unsigned bias, Register tmp) {
9859 Label L_stack_ok;
9860 if (bias == 0) {
9861 testptr(sp, 2 * wordSize - 1);
9862 } else {
9863 // lea(tmp, Address(rsp, bias);
9864 mov(tmp, sp);
9865 addptr(tmp, bias);
9866 testptr(tmp, 2 * wordSize - 1);
9867 }
9868 jcc(Assembler::equal, L_stack_ok);
9869 block_comment(msg);
9870 stop(msg);
9871 bind(L_stack_ok);
9872 }
9873
// Implements lightweight-locking.
// Branches to 'slow' upon failure; falls through upon success.
//
// obj: the object to be locked
// reg_rax: rax
// thread: the thread which attempts to lock obj
// tmp: a temporary register
void MacroAssembler::lightweight_lock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow) {
  assert(reg_rax == rax, "");
  assert_different_registers(obj, reg_rax, thread, tmp);

  Label push;
  const Register top = tmp;  // alias: 'tmp' doubles as the lock-stack top offset

  // Preload the markWord. It is important that this is the first
  // instruction emitted as it is part of C1's null check semantics.
  movptr(reg_rax, Address(obj, oopDesc::mark_offset_in_bytes()));

  // Load top.
  movl(top, Address(thread, JavaThread::lock_stack_top_offset()));

  // Check if the lock-stack is full.
  cmpl(top, LockStack::end_offset());
  jcc(Assembler::greaterEqual, slow);

  // Check for recursion: obj already on top of this thread's lock-stack.
  cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
  jcc(Assembler::equal, push);

  // Check header for monitor (0b10).
  testptr(reg_rax, markWord::monitor_value);
  jcc(Assembler::notZero, slow);

  // Try to lock. Transition lock bits 0b01 => 0b00
  movptr(tmp, reg_rax);                            // tmp := new (locked) mark word
  andptr(tmp, ~(int32_t)markWord::unlocked_value);
  orptr(reg_rax, markWord::unlocked_value);        // rax := expected (unlocked) mark word
  lock(); cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
  jcc(Assembler::notEqual, slow);

  // Restore top, CAS clobbers register.
  movl(top, Address(thread, JavaThread::lock_stack_top_offset()));

  bind(push);
  // After successful lock, push object on lock-stack.
  movptr(Address(thread, top), obj);
  incrementl(top, oopSize);
  movl(Address(thread, JavaThread::lock_stack_top_offset()), top);
}
9922
// Implements lightweight-unlocking.
// Branches to 'slow' upon failure; falls through upon success.
//
// obj: the object to be unlocked
// reg_rax: rax
// thread: the thread
// tmp: a temporary register
//
// x86_32 Note: reg_rax and thread may alias each other due to limited register
// availability.
void MacroAssembler::lightweight_unlock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow) {
  assert(reg_rax == rax, "");
  assert_different_registers(obj, reg_rax, tmp);
  LP64_ONLY(assert_different_registers(obj, reg_rax, thread, tmp);)

  Label unlocked, push_and_slow;
  const Register top = tmp;  // alias: 'tmp' doubles as the lock-stack top offset

  // Check if obj is top of lock-stack.
  movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
  cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
  jcc(Assembler::notEqual, slow);

  // Pop lock-stack. Debug builds poison the vacated slot first.
  DEBUG_ONLY(movptr(Address(thread, top, Address::times_1, -oopSize), 0);)
  subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);

  // Check if recursive: obj still appears below the popped entry.
  cmpptr(obj, Address(thread, top, Address::times_1, -2 * oopSize));
  jcc(Assembler::equal, unlocked);

  // Not recursive. Check header for monitor (0b10).
  movptr(reg_rax, Address(obj, oopDesc::mark_offset_in_bytes()));
  testptr(reg_rax, markWord::monitor_value);
  jcc(Assembler::notZero, push_and_slow);

#ifdef ASSERT
  // Check header not unlocked (0b01).
  Label not_unlocked;
  testptr(reg_rax, markWord::unlocked_value);
  jcc(Assembler::zero, not_unlocked);
  stop("lightweight_unlock already unlocked");
  bind(not_unlocked);
#endif

  // Try to unlock. Transition lock bits 0b00 => 0b01
  movptr(tmp, reg_rax);                 // rax keeps the expected value for cmpxchg
  orptr(tmp, markWord::unlocked_value); // tmp := new (unlocked) mark word
  lock(); cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
  jcc(Assembler::equal, unlocked);

  bind(push_and_slow);
  // Restore lock-stack and handle the unlock in runtime.
  if (thread == reg_rax) {
    // On x86_32 we may lose the thread.
    get_thread(thread);
  }
#ifdef ASSERT
  // Re-poison: put obj back into the slot we popped above.
  movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
  movptr(Address(thread, top), obj);
#endif
  addl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
  jmp(slow);

  bind(unlocked);
}
|