/*
 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
    const int mark_offset = lock_offset +
                            BasicLock::displaced_header_offset_in_bytes();

    // Load object pointer into obj_reg
    movptr(obj_reg, Address(lock_reg, obj_offset));

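    // When DiagnoseSyncOnValueBasedClasses is enabled, check the klass access
    // flags and take the slow path for value-based classes so the runtime can
    // issue the diagnostic.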
    if (DiagnoseSyncOnValueBasedClasses != 0) {
      load_klass(tmp_reg, obj_reg, rklass_decode_tmp);
      movl(tmp_reg, Address(tmp_reg, Klass::access_flags_offset()));
      testl(tmp_reg, JVM_ACC_IS_VALUE_BASED_CLASS);
      jcc(Assembler::notZero, slow_case);
    }

    if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
      const Register thread = r15_thread;
#else
      const Register thread = lock_reg;
      get_thread(thread);
#endif
      // Load object header, prepare for CAS from unlocked to locked.
      movptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
      lightweight_lock(obj_reg, swap_reg, thread, tmp_reg, slow_case);
    } else if (LockingMode == LM_LEGACY) {
      // Load immediate 1 into swap_reg %rax
      movl(swap_reg, 1);

      // Load (object->mark() | 1) into swap_reg %rax
      orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));

      // Save (object->mark() | 1) into BasicLock's displaced header
      movptr(Address(lock_reg, mark_offset), swap_reg);

      assert(lock_offset == 0,
             "displaced header must be first word in BasicObjectLock");

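      // Try to install the BasicLock address into the object's mark word.
      // cmpxchg compares swap_reg (%rax, the expected unlocked mark) with the
      // current mark and sets ZF on success, so "zero" below means the stack
      // lock was acquired.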
      lock();
      cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
      jcc(Assembler::zero, count_locking);

      const int zero_bits = LP64_ONLY(7) NOT_LP64(3);

    // ...
    const Register swap_reg = rax; // Must use rax for cmpxchg instruction
    const Register header_reg = LP64_ONLY(c_rarg2) NOT_LP64(rbx); // Will contain the old oopMark
    const Register obj_reg = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // Will contain the oop

    save_bcp(); // Save in case of exception

    if (LockingMode != LM_LIGHTWEIGHT) {
      // Convert from BasicObjectLock structure to object and BasicLock structure.
      // Store the BasicLock address into %rax.
      lea(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset()));
    }

    // Load oop into obj_reg (%c_rarg3)
    movptr(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));

    // Free entry
    movptr(Address(lock_reg, BasicObjectLock::obj_offset()), NULL_WORD);

    if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
      const Register thread = r15_thread;
#else
      const Register thread = header_reg;
      get_thread(thread);
#endif
      // Handle unstructured locking.
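      // The object being unlocked must be the entry on top of this thread's
      // lock stack; otherwise take the slow path.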
      Register tmp = swap_reg;
      movl(tmp, Address(thread, JavaThread::lock_stack_top_offset()));
      cmpptr(obj_reg, Address(thread, tmp, Address::times_1, -oopSize));
      jcc(Assembler::notEqual, slow_case);
      // Try to swing header from locked to unlocked.
      movptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
      andptr(swap_reg, ~(int32_t)markWord::lock_mask_in_place);
      lightweight_unlock(obj_reg, swap_reg, header_reg, slow_case);
    } else if (LockingMode == LM_LEGACY) {
      // Load the old header from BasicLock structure
      movptr(header_reg, Address(swap_reg,
                                 BasicLock::displaced_header_offset_in_bytes()));

      // Test for recursion
      testptr(header_reg, header_reg);

      // zero for recursive case
      jcc(Assembler::zero, count_locking);

      // Atomic swap back the old header
      lock();
      cmpxchgptr(header_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));

      // zero for simple unlock of a stack-lock case
      jcc(Assembler::notZero, slow_case);

      bind(count_locking);
    }
/*
 * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
    const int mark_offset = lock_offset +
                            BasicLock::displaced_header_offset_in_bytes();

    // Load object pointer into obj_reg
    movptr(obj_reg, Address(lock_reg, obj_offset));

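    // When DiagnoseSyncOnValueBasedClasses is enabled, check the klass access
    // flags and take the slow path for value-based classes so the runtime can
    // issue the diagnostic.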
    if (DiagnoseSyncOnValueBasedClasses != 0) {
      load_klass(tmp_reg, obj_reg, rklass_decode_tmp);
      movl(tmp_reg, Address(tmp_reg, Klass::access_flags_offset()));
      testl(tmp_reg, JVM_ACC_IS_VALUE_BASED_CLASS);
      jcc(Assembler::notZero, slow_case);
    }

    if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
      const Register thread = r15_thread;
#else
      const Register thread = lock_reg;
      get_thread(thread);
#endif
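      // Fast-path lock using the lightweight locking scheme (lock stack);
      // lightweight_lock branches to slow_case if the lock cannot be taken here.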
      lightweight_lock(obj_reg, swap_reg, thread, tmp_reg, slow_case);
    } else if (LockingMode == LM_LEGACY) {
      // Load immediate 1 into swap_reg %rax
      movl(swap_reg, 1);

      // Load (object->mark() | 1) into swap_reg %rax
      orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));

      // Save (object->mark() | 1) into BasicLock's displaced header
      movptr(Address(lock_reg, mark_offset), swap_reg);

      assert(lock_offset == 0,
             "displaced header must be first word in BasicObjectLock");

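      // Try to install the BasicLock address into the object's mark word.
      // cmpxchg compares swap_reg (%rax, the expected unlocked mark) with the
      // current mark and sets ZF on success, so "zero" below means the stack
      // lock was acquired.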
      lock();
      cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
      jcc(Assembler::zero, count_locking);

      const int zero_bits = LP64_ONLY(7) NOT_LP64(3);

    // ...
    const Register swap_reg = rax; // Must use rax for cmpxchg instruction
    const Register header_reg = LP64_ONLY(c_rarg2) NOT_LP64(rbx); // Will contain the old oopMark
    const Register obj_reg = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // Will contain the oop

    save_bcp(); // Save in case of exception

    if (LockingMode != LM_LIGHTWEIGHT) {
      // Convert from BasicObjectLock structure to object and BasicLock structure.
      // Store the BasicLock address into %rax.
      lea(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset()));
    }

    // Load oop into obj_reg (%c_rarg3)
    movptr(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));

    // Free entry
    movptr(Address(lock_reg, BasicObjectLock::obj_offset()), NULL_WORD);

    if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
      lightweight_unlock(obj_reg, swap_reg, r15_thread, header_reg, slow_case);
#else
      // This relies on the implementation of lightweight_unlock being able to handle
      // that the reg_rax and thread Register parameters may alias each other.
      get_thread(swap_reg);
      lightweight_unlock(obj_reg, swap_reg, swap_reg, header_reg, slow_case);
#endif
    } else if (LockingMode == LM_LEGACY) {
      // Load the old header from BasicLock structure
      movptr(header_reg, Address(swap_reg,
                                 BasicLock::displaced_header_offset_in_bytes()));

      // Test for recursion
      testptr(header_reg, header_reg);

      // zero for recursive case
      jcc(Assembler::zero, count_locking);

      // Atomic swap back the old header
      lock();
      cmpxchgptr(header_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));

      // zero for simple unlock of a stack-lock case
      jcc(Assembler::notZero, slow_case);

      bind(count_locking);
    }