    const Register rklass_decode_tmp = rscratch1;

    const int obj_offset = in_bytes(BasicObjectLock::obj_offset());
    const int lock_offset = in_bytes(BasicObjectLock::lock_offset());
    const int mark_offset = lock_offset +
                            BasicLock::displaced_header_offset_in_bytes();

    // Load object pointer into obj_reg
    movptr(obj_reg, Address(lock_reg, obj_offset));

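    // Value-based classes (e.g. java.lang.Integer) should not be used for
    // synchronization; when -XX:DiagnoseSyncOnValueBasedClasses is set,
    // detect them here and divert to the slow path so the runtime can log
    // or report the offending monitorenter.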
    if (DiagnoseSyncOnValueBasedClasses != 0) {
      load_klass(tmp_reg, obj_reg, rklass_decode_tmp);
      movl(tmp_reg, Address(tmp_reg, Klass::access_flags_offset()));
      testl(tmp_reg, JVM_ACC_IS_VALUE_BASED_CLASS);
      jcc(Assembler::notZero, slow_case);
    }

    if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
      const Register thread = r15_thread;
#else
      const Register thread = lock_reg;
      get_thread(thread);
#endif
      lightweight_lock(obj_reg, swap_reg, thread, tmp_reg, slow_case);
    } else if (LockingMode == LM_LEGACY) {
      // Load immediate 1 into swap_reg %rax
      movl(swap_reg, 1);

      // Load (object->mark() | 1) into swap_reg %rax
      orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));

      // Save (object->mark() | 1) into BasicLock's displaced header
      movptr(Address(lock_reg, mark_offset), swap_reg);

      assert(lock_offset == 0,
             "displaced header must be first word in BasicObjectLock");

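      // Try to install the BasicLock address into the object's mark word.
      // cmpxchg compares rax (the unlocked mark recorded above) with the
      // current mark; on success ZF is set and we own a new stack lock.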
      lock();
      cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
      jcc(Assembler::zero, count_locking);

      const int zero_bits = LP64_ONLY(7) NOT_LP64(3);

      // Fast check for recursive lock.
      //
      // The lock is recursive if the displaced mark (now in swap_reg %rax,
      // as the result of cmpxchg) points into our own stack, that is:
      //  1) (mark & zero_bits) == 0, and
      //  2) rsp <= mark < rsp + os::vm_page_size().
      // These tests can be done by evaluating the single expression
      // ((mark - rsp) & (zero_bits - os::vm_page_size())), assuming both
      // the stack pointer and the page size have their least significant
      // bits clear.
      subptr(swap_reg, rsp);
      andptr(swap_reg, zero_bits - (int)os::vm_page_size());
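      // Worked example (64-bit, assuming a 4K page size): the mask is
      // 7 - 4096 = 0xFFFFFFFFFFFFF007, which keeps the low three bits and
      // every bit at or above the page bit. For a recursive stack lock at,
      // say, mark = rsp + 0x40, (0x40 & 0xFFFFFFFFFFFFF007) == 0; for a
      // mark below rsp or outside the current page the result is nonzero.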

      // Save the test result; for the recursive case the result is zero
      movptr(Address(lock_reg, mark_offset), swap_reg);
      jcc(Assembler::notZero, slow_case);

      bind(count_locking);
    }
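    // Both fast paths merge here: record one more held monitor for this thread.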
    inc_held_monitor_count();
    jmp(done);

    bind(slow_case);

    // Call the runtime routine for slow case
    if (LockingMode == LM_LIGHTWEIGHT) {
      call_VM(noreg,
              CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter_obj),
              obj_reg);
    } else {
      call_VM(noreg,
              CAST_FROM_FN_PTR(address, InterpreterRuntime::monitorenter),
              lock_reg);
    }
    bind(done);
  }
}


// Unlocks an object. Used in monitorexit bytecode and
// remove_activation. Throws an IllegalMonitorStateException if the
// object is not locked by the current thread.
//
// Args:
//      rdx, c_rarg1: BasicObjectLock for lock
//
// Kills:
//      rax, rbx, rcx, rdx
//      c_rarg0, c_rarg1, c_rarg2, c_rarg3, ... (param regs)
//      rscratch1 (scratch reg)
void InterpreterMacroAssembler::unlock_object(Register lock_reg) {
  assert(lock_reg == LP64_ONLY(c_rarg1) NOT_LP64(rdx),
         "The argument is only for looks. It must be c_rarg1 (rdx on 32-bit)");

  Label count_locking, done, slow_case;

  const Register swap_reg   = rax;  // Must use rax for cmpxchg instruction
  const Register header_reg = LP64_ONLY(c_rarg2) NOT_LP64(rbx);  // Will contain the old oopMark
  const Register obj_reg    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);  // Will contain the oop

  save_bcp(); // Save in case of exception

  if (LockingMode != LM_LIGHTWEIGHT) {
    // Convert from BasicObjectLock structure to object and BasicLock
    // structure. Store the BasicLock address into %rax.
    lea(swap_reg, Address(lock_reg, BasicObjectLock::lock_offset()));
  }

  // Load oop into obj_reg (%c_rarg3)
  movptr(obj_reg, Address(lock_reg, BasicObjectLock::obj_offset()));

  // Free the BasicObjectLock entry
  movptr(Address(lock_reg, BasicObjectLock::obj_offset()), NULL_WORD);

  if (LockingMode == LM_LIGHTWEIGHT) {
#ifdef _LP64
    lightweight_unlock(obj_reg, swap_reg, r15_thread, header_reg, slow_case);
#else
    // This relies on lightweight_unlock being able to handle that its
    // reg_rax and thread Register parameters may alias each other.
    get_thread(swap_reg);
    lightweight_unlock(obj_reg, swap_reg, swap_reg, header_reg, slow_case);
#endif
  } else if (LockingMode == LM_LEGACY) {
    // Load the old header from the BasicLock structure
    movptr(header_reg, Address(swap_reg,
                               BasicLock::displaced_header_offset_in_bytes()));

    // Test for recursion: the displaced header is zero for a recursive
    // lock (stored by the fast check in lock_object above)
    testptr(header_reg, header_reg);
    jcc(Assembler::zero, count_locking);

    // Atomic swap back the old header
    lock();
    cmpxchgptr(header_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
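    // rax still holds the BasicLock address from the lea above; if the mark
    // word no longer points at our BasicLock, the lock was inflated in the
    // meantime and the CAS fails.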

    // zero for simple unlock of a stack-lock case
    jcc(Assembler::notZero, slow_case);

    bind(count_locking);