// } else if (THREAD->is_lock_owned((address)displaced_header))
//   // Simple recursive case.
//   monitor->lock()->set_displaced_header(nullptr);
// } else {
//   // Slow path.
//   InterpreterRuntime::monitorenter(THREAD, monitor);
// }

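// For orientation: "monitor" points at an interpreter stack slot laid out as a
// BasicObjectLock, i.e. a BasicLock (which caches the displaced mark word)
// followed by the locked oop. A rough sketch, not the exact VM declarations:
//
//   struct BasicLock       { volatile markWord _displaced_header; };
//   struct BasicObjectLock { BasicLock _lock; oop _obj; };
//
// The lock_offset()/obj_offset()/displaced header offsets used below refer to
// these two fields.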
const Register header           = R7_ARG5;
const Register object_mark_addr = R8_ARG6;
const Register current_header   = R9_ARG7;
const Register tmp              = R10_ARG8;

Label count_locking, done;
Label cas_failed, slow_case;

assert_different_registers(header, object_mark_addr, current_header, tmp);

// markWord displaced_header = obj->mark().set_unlocked();

// Load markWord from object into header.
ld(header, oopDesc::mark_offset_in_bytes(), object);

if (DiagnoseSyncOnValueBasedClasses != 0) {
  load_klass(tmp, object);
  lwz(tmp, in_bytes(Klass::access_flags_offset()), tmp);
  testbitdi(CCR0, R0, tmp, exact_log2(JVM_ACC_IS_VALUE_BASED_CLASS));
  bne(CCR0, slow_case);
}
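
// When DiagnoseSyncOnValueBasedClasses is enabled, synchronizing on an instance
// of a value-based class (JVM_ACC_IS_VALUE_BASED_CLASS set in the klass access
// flags) is diverted to the runtime, which emits the diagnostic. Roughly (a
// sketch of the intent; the real test is the access-flags bit checked above):
//
//   if (DiagnoseSyncOnValueBasedClasses != 0 && obj->klass()->is_value_based()) {
//     goto slow_case;  // InterpreterRuntime::monitorenter() does the reporting
//   }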

if (LockingMode == LM_LIGHTWEIGHT) {
  lightweight_lock(object, /* mark word */ header, tmp, slow_case);
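  // lightweight_lock() attempts to CAS the mark word from "unlocked" to
  // "fast-locked" and records ownership by pushing the oop onto the current
  // thread's lock stack, branching to slow_case on contention or if the lock
  // is already inflated. Roughly (a sketch only; the exact steps live in
  // MacroAssembler::lightweight_lock):
  //
  //   markWord mark = obj->mark();
  //   if (!mark.is_unlocked() ||
  //       obj->cas_set_mark(mark.set_fast_locked(), mark) != mark) {
  //     goto slow_case;
  //   }
  //   current_thread->lock_stack().push(obj);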
  b(count_locking);
} else if (LockingMode == LM_LEGACY) {

  // Set displaced_header to be (markWord of object | UNLOCK_VALUE).
  ori(header, header, markWord::unlocked_value);

  // monitor->lock()->set_displaced_header(displaced_header);
  const int lock_offset = in_bytes(BasicObjectLock::lock_offset());
  const int mark_offset = lock_offset +
                          BasicLock::displaced_header_offset_in_bytes();

  // Initialize the box (Must happen before we update the object mark!).
  std(header, mark_offset, monitor);
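
  // The box must hold a valid (unlocked) mark word before the CAS below
  // publishes the monitor address in the object's mark word, because other
  // threads may follow that pointer and read the displaced header immediately.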

  // if (Atomic::cmpxchg(/*addr*/obj->mark_addr(), /*cmp*/displaced_header, /*ex=*/monitor) == displaced_header) {

  // Store stack address of the BasicObjectLock (this is monitor) into object.
  addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());

  // Must fence, otherwise, preceding store(s) may float below cmpxchg.
  // CmpxchgX sets CCR0 to cmpX(current, displaced).
  cmpxchgd(/*flag=*/CCR0,
// ...

if (LockingMode != LM_LIGHTWEIGHT) {
  // Test first if we are in the fast recursive case.
  ld(header, in_bytes(BasicObjectLock::lock_offset()) +
             BasicLock::displaced_header_offset_in_bytes(), monitor);

  // If the displaced header is zero, we have a recursive unlock.
  cmpdi(CCR0, header, 0);
  beq(CCR0, free_slot); // recursive unlock
}
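// (A zero displaced header is what the enter path stored for a recursive
// acquisition, cf. "monitor->lock()->set_displaced_header(nullptr)" in the
// locking template comment above, so nothing needs to be written back to the
// object's mark word here.)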

// } else if (Atomic::cmpxchg(obj->mark_addr(), monitor, displaced_header) == monitor) {
//   // We swapped the unlocked mark in displaced_header into the object's mark word.
//   monitor->set_obj(nullptr);

// If we still have a lightweight lock, unlock the object and be done.

// The object address from the monitor is in object.
ld(object, in_bytes(BasicObjectLock::obj_offset()), monitor);

if (LockingMode == LM_LIGHTWEIGHT) {
  // Check for non-symmetric locking. This is allowed by the spec and the interpreter
  // must handle it.
  Register tmp = current_header;
  // First check for lock-stack underflow.
  lwz(tmp, in_bytes(JavaThread::lock_stack_top_offset()), R16_thread);
  cmplwi(CCR0, tmp, (unsigned)LockStack::start_offset());
  ble(CCR0, slow_case);
  // Then check if the top of the lock-stack matches the unlocked object.
  addi(tmp, tmp, -oopSize);
  ldx(tmp, tmp, R16_thread);
  cmpd(CCR0, tmp, object);
  bne(CCR0, slow_case);

  ld(header, oopDesc::mark_offset_in_bytes(), object);
  andi_(R0, header, markWord::monitor_value);
  bne(CCR0, slow_case);
  lightweight_unlock(object, header, slow_case);
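
  // Taken together, the checks above amount to the following (a sketch;
  // lock_stack_is_empty()/lock_stack_top() are placeholder names, not real API):
  //
  //   if (thread->lock_stack_is_empty() ||
  //       thread->lock_stack_top() != obj ||   // non-symmetric unlock order
  //       obj->mark().has_monitor()) {         // lock has been inflated
  //     goto slow_case;                        // let the runtime sort it out
  //   }
  //   lightweight_unlock(obj);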
} else {
  addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());

  // We have the displaced header in displaced_header. If the lock is still
  // lightweight, it will contain the monitor address and we'll store the
  // displaced header back into the object's mark word.
  // CmpxchgX sets CCR0 to cmpX(current, monitor).
  cmpxchgd(/*flag=*/CCR0,
           /*current_value=*/current_header,
           /*compare_value=*/monitor, /*exchange_value=*/header,
           /*where=*/object_mark_addr,
           MacroAssembler::MemBarRel,
           MacroAssembler::cmpxchgx_hint_release_lock(),
           noreg,
           &slow_case);
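
  // MemBarRel (release) is the appropriate barrier for the unlock side: stores
  // performed while the lock was held must become visible before the restored
  // mark word does; no acquire semantics are needed here.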
}
b(free_slot);

// } else {
// } else if (THREAD->is_lock_owned((address)displaced_header))
//   // Simple recursive case.
//   monitor->lock()->set_displaced_header(nullptr);
// } else {
//   // Slow path.
//   InterpreterRuntime::monitorenter(THREAD, monitor);
// }

const Register header           = R7_ARG5;
const Register object_mark_addr = R8_ARG6;
const Register current_header   = R9_ARG7;
const Register tmp              = R10_ARG8;

Label count_locking, done;
Label cas_failed, slow_case;

assert_different_registers(header, object_mark_addr, current_header, tmp);

// markWord displaced_header = obj->mark().set_unlocked();

if (DiagnoseSyncOnValueBasedClasses != 0) {
  load_klass(tmp, object);
  lwz(tmp, in_bytes(Klass::access_flags_offset()), tmp);
  testbitdi(CCR0, R0, tmp, exact_log2(JVM_ACC_IS_VALUE_BASED_CLASS));
  bne(CCR0, slow_case);
}

if (LockingMode == LM_LIGHTWEIGHT) {
  lightweight_lock(object, header, tmp, slow_case);
  b(count_locking);
} else if (LockingMode == LM_LEGACY) {
  // Load markWord from object into header.
  ld(header, oopDesc::mark_offset_in_bytes(), object);

  // Set displaced_header to be (markWord of object | UNLOCK_VALUE).
  ori(header, header, markWord::unlocked_value);
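
  // Setting the unlocked bit means the CAS below compares against the mark word
  // the object would have if it were currently unlocked; if the object is in
  // fact already locked, the comparison fails and control reaches the
  // cas_failed / recursive-lock handling instead of overwriting a foreign lock.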

  // monitor->lock()->set_displaced_header(displaced_header);
  const int lock_offset = in_bytes(BasicObjectLock::lock_offset());
  const int mark_offset = lock_offset +
                          BasicLock::displaced_header_offset_in_bytes();

  // Initialize the box (Must happen before we update the object mark!).
  std(header, mark_offset, monitor);

  // if (Atomic::cmpxchg(/*addr*/obj->mark_addr(), /*cmp*/displaced_header, /*ex=*/monitor) == displaced_header) {

  // Store stack address of the BasicObjectLock (this is monitor) into object.
  addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());

  // Must fence, otherwise, preceding store(s) may float below cmpxchg.
  // CmpxchgX sets CCR0 to cmpX(current, displaced).
  cmpxchgd(/*flag=*/CCR0,
// ...

if (LockingMode != LM_LIGHTWEIGHT) {
  // Test first if we are in the fast recursive case.
  ld(header, in_bytes(BasicObjectLock::lock_offset()) +
             BasicLock::displaced_header_offset_in_bytes(), monitor);

  // If the displaced header is zero, we have a recursive unlock.
  cmpdi(CCR0, header, 0);
  beq(CCR0, free_slot); // recursive unlock
}

// } else if (Atomic::cmpxchg(obj->mark_addr(), monitor, displaced_header) == monitor) {
//   // We swapped the unlocked mark in displaced_header into the object's mark word.
//   monitor->set_obj(nullptr);

// If we still have a lightweight lock, unlock the object and be done.

// The object address from the monitor is in object.
ld(object, in_bytes(BasicObjectLock::obj_offset()), monitor);

if (LockingMode == LM_LIGHTWEIGHT) {
  lightweight_unlock(object, header, slow_case);
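  // Here lightweight_unlock() is expected to do the lock-stack bookkeeping
  // itself: verify the object is the thread's most recent lightweight lock,
  // pop it, and restore an unlocked mark word, branching to slow_case for an
  // inflated lock or a non-symmetric unlock. Roughly (a sketch with
  // placeholder helper names, not the exact API):
  //
  //   if (thread->lock_stack_top() != obj || obj->mark().has_monitor())
  //     goto slow_case;
  //   obj->release_set_mark(obj->mark().set_unlocked());
  //   thread->lock_stack_pop();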
} else {
  addi(object_mark_addr, object, oopDesc::mark_offset_in_bytes());

  // We have the displaced header in displaced_header. If the lock is still
  // lightweight, it will contain the monitor address and we'll store the
  // displaced header back into the object's mark word.
  // CmpxchgX sets CCR0 to cmpX(current, monitor).
  cmpxchgd(/*flag=*/CCR0,
           /*current_value=*/current_header,
           /*compare_value=*/monitor, /*exchange_value=*/header,
           /*where=*/object_mark_addr,
           MacroAssembler::MemBarRel,
           MacroAssembler::cmpxchgx_hint_release_lock(),
           noreg,
           &slow_case);
}
b(free_slot);
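
// free_slot (bound further down) clears the BasicObjectLock's obj field,
// conceptually monitor->set_obj(nullptr), so the interpreter can reuse the
// monitor slot.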

// } else {