
src/hotspot/cpu/x86/macroAssembler_x86.cpp


   1 /*
   2  * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

5221 
5222 void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
5223   // get mirror
5224   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
5225   load_method_holder(mirror, method);
5226   movptr(mirror, Address(mirror, mirror_offset));
5227   resolve_oop_handle(mirror, tmp);
5228 }
5229 
5230 void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
5231   load_method_holder(rresult, rmethod);
5232   movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
5233 }
5234 
5235 void MacroAssembler::load_method_holder(Register holder, Register method) {
5236   movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
5237   movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
5238   movptr(holder, Address(holder, ConstantPool::pool_holder_offset()));          // InstanceKlass*
5239 }
5240 
5241 void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
5242   assert_different_registers(src, tmp);
5243   assert_different_registers(dst, tmp);
5244 #ifdef _LP64
5245   if (UseCompressedClassPointers) {
5246     movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));
5247     decode_klass_not_null(dst, tmp);
5248   } else
5249 #endif

5250     movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));

5251 }
5252 
5253 void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {

5254   assert_different_registers(src, tmp);
5255   assert_different_registers(dst, tmp);
5256 #ifdef _LP64
5257   if (UseCompressedClassPointers) {
5258     encode_klass_not_null(src, tmp);
5259     movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
5260   } else
5261 #endif
5262     movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
5263 }
5264 
5265 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
5266                                     Register tmp1, Register thread_tmp) {
5267   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5268   decorators = AccessInternal::decorator_fixup(decorators, type);
5269   bool as_raw = (decorators & AS_RAW) != 0;
5270   if (as_raw) {
5271     bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5272   } else {
5273     bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5274   }
5275 }
5276 
5277 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register val,
5278                                      Register tmp1, Register tmp2, Register tmp3) {
5279   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5280   decorators = AccessInternal::decorator_fixup(decorators, type);
5281   bool as_raw = (decorators & AS_RAW) != 0;
5282   if (as_raw) {
5283     bs->BarrierSetAssembler::store_at(this, decorators, type, dst, val, tmp1, tmp2, tmp3);
5284   } else {

5292 }
5293 
5294 // Doesn't do verification, generates fixed size code
5295 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
5296                                             Register thread_tmp, DecoratorSet decorators) {
5297   access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
5298 }
5299 
5300 void MacroAssembler::store_heap_oop(Address dst, Register val, Register tmp1,
5301                                     Register tmp2, Register tmp3, DecoratorSet decorators) {
5302   access_store_at(T_OBJECT, IN_HEAP | decorators, dst, val, tmp1, tmp2, tmp3);
5303 }
5304 
5305 // Used for storing nulls.
5306 void MacroAssembler::store_heap_oop_null(Address dst) {
5307   access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
5308 }
5309 
5310 #ifdef _LP64
5311 void MacroAssembler::store_klass_gap(Register dst, Register src) {

5312   if (UseCompressedClassPointers) {
5313     // Store to klass gap in destination
5314     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
5315   }
5316 }
5317 
5318 #ifdef ASSERT
5319 void MacroAssembler::verify_heapbase(const char* msg) {
5320   assert (UseCompressedOops, "should be compressed");
5321   assert (Universe::heap() != nullptr, "java heap should be initialized");
5322   if (CheckCompressedOops) {
5323     Label ok;
5324     ExternalAddress src2(CompressedOops::ptrs_base_addr());
5325     const bool is_src2_reachable = reachable(src2);
5326     if (!is_src2_reachable) {
5327       push(rscratch1);  // cmpptr trashes rscratch1
5328     }
5329     cmpptr(r12_heapbase, src2, rscratch1);
5330     jcc(Assembler::equal, ok);
5331     STOP(msg);

9785 
9786 #endif // !WIN32 || _LP64
9787 
9788 void MacroAssembler::check_stack_alignment(Register sp, const char* msg, unsigned bias, Register tmp) {
9789   Label L_stack_ok;
9790   if (bias == 0) {
9791     testptr(sp, 2 * wordSize - 1);
9792   } else {
9793     // lea(tmp, Address(rsp, bias));
9794     mov(tmp, sp);
9795     addptr(tmp, bias);
9796     testptr(tmp, 2 * wordSize - 1);
9797   }
9798   jcc(Assembler::equal, L_stack_ok);
9799   block_comment(msg);
9800   stop(msg);
9801   bind(L_stack_ok);
9802 }
9803 
9804 // Implements lightweight-locking.
9805 // Branches to slow upon failure to lock the object, with ZF cleared.
9806 // Falls through upon success with unspecified ZF.
9807 //
9808 // obj: the object to be locked
9809 // hdr: the (pre-loaded) header of the object, must be rax
9810 // thread: the thread which attempts to lock obj
9811 // tmp: a temporary register
9812 void MacroAssembler::lightweight_lock(Register obj, Register hdr, Register thread, Register tmp, Label& slow) {
9813   assert(hdr == rax, "header must be in rax for cmpxchg");
9814   assert_different_registers(obj, hdr, thread, tmp);
9815 
9816   // First we need to check if the lock-stack has room for pushing the object reference.
9817   // Note: we subtract 1 from the end-offset so that we can do a 'greater' comparison, instead
9818   // of 'greaterEqual' below, which readily clears the ZF. This makes C2 code a little simpler and
9819   // avoids one branch.
9820   cmpl(Address(thread, JavaThread::lock_stack_top_offset()), LockStack::end_offset() - 1);
9821   jcc(Assembler::greater, slow);
9822 
9823   // Now we attempt to take the fast-lock.
9824   // Clear lock_mask bits (locked state).
9825   andptr(hdr, ~(int32_t)markWord::lock_mask_in_place);
9826   movptr(tmp, hdr);
9827   // Set unlocked_value bit.
9828   orptr(hdr, markWord::unlocked_value);
9829   lock();
9830   cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
9831   jcc(Assembler::notEqual, slow);
9832 
9833   // If successful, push object to lock-stack.
9834   movl(tmp, Address(thread, JavaThread::lock_stack_top_offset()));
9835   movptr(Address(thread, tmp), obj);
9836   incrementl(tmp, oopSize);
9837   movl(Address(thread, JavaThread::lock_stack_top_offset()), tmp);
9838 }
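The whole fast path above comes down to one compare-and-swap on the object's mark word: the expected value is the header with the unlocked pattern (0b01) in the two low lock bits, the new value is the same header with those bits cleared (0b00). The end_offset() - 1 / 'greater' trick in the room check exists so that whenever the branch to slow is taken the comparison was strictly greater, which guarantees ZF is clear, matching the contract stated above. A minimal C++ sketch of the CAS transition, with the bit constants written out as assumptions rather than taken from markWord.hpp:

    #include <atomic>
    #include <cstdint>

    // Assumed low-bit layout of the mark word (mirrors the markWord::* constants):
    // 0b01 = unlocked, 0b00 = fast-locked, 0b10 = inflated monitor.
    constexpr uintptr_t lock_mask      = 0b11;
    constexpr uintptr_t unlocked_value = 0b01;

    // Same transition as the cmpxchgptr above: expect "header | unlocked",
    // install "header with the lock bits cleared". Failure means the header
    // changed or the object is already locked/inflated, so the caller takes
    // the slow path.
    bool try_fast_lock(std::atomic<uintptr_t>& mark_word) {
      uintptr_t mark     = mark_word.load(std::memory_order_relaxed);
      uintptr_t expected = (mark & ~lock_mask) | unlocked_value;
      uintptr_t locked   =  mark & ~lock_mask;
      return mark_word.compare_exchange_strong(expected, locked);
    }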
9839 
9840 // Implements lightweight-unlocking.
9841 // Branches to slow upon failure, with ZF cleared.
9842 // Falls through upon success, with unspecified ZF.
9843 //
9844 // obj: the object to be unlocked
9845 // hdr: the (pre-loaded) header of the object, must be rax

9846 // tmp: a temporary register
9847 void MacroAssembler::lightweight_unlock(Register obj, Register hdr, Register tmp, Label& slow) {
9848   assert(hdr == rax, "header must be in rax for cmpxchg");
9849   assert_different_registers(obj, hdr, tmp);
9850 
9851   // Mark-word must be lock_mask now, try to swing it back to unlocked_value.
9852   movptr(tmp, hdr); // The expected old value
9853   orptr(tmp, markWord::unlocked_value);
9854   lock();
9855   cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
9856   jcc(Assembler::notEqual, slow);
9857   // Pop the lock object from the lock-stack.
9858 #ifdef _LP64
9859   const Register thread = r15_thread;
9860 #else
9861   const Register thread = rax;
9862   get_thread(thread);
9863 #endif
9864   subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
9865 #ifdef ASSERT
9866   movl(tmp, Address(thread, JavaThread::lock_stack_top_offset()));
9867   movptr(Address(thread, tmp), 0);
9868 #endif
9869 }
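The unlock side is the mirror-image transition, swinging the low lock bits from 0b00 back to 0b01 before the entry is popped off the lock-stack. A matching sketch, with the same bit constants passed in as assumptions:

    #include <atomic>
    #include <cstdint>

    // Reverse of the locking sketch: 0b00 (fast-locked) -> 0b01 (unlocked).
    bool try_fast_unlock(std::atomic<uintptr_t>& mark_word,
                         uintptr_t lock_mask = 0b11, uintptr_t unlocked_value = 0b01) {
      uintptr_t mark     = mark_word.load(std::memory_order_relaxed);
      uintptr_t expected =  mark & ~lock_mask;
      uintptr_t unlocked = (mark & ~lock_mask) | unlocked_value;
      return mark_word.compare_exchange_strong(expected, unlocked);
    }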

   1 /*
   2  * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *

5221 
5222 void MacroAssembler::load_mirror(Register mirror, Register method, Register tmp) {
5223   // get mirror
5224   const int mirror_offset = in_bytes(Klass::java_mirror_offset());
5225   load_method_holder(mirror, method);
5226   movptr(mirror, Address(mirror, mirror_offset));
5227   resolve_oop_handle(mirror, tmp);
5228 }
5229 
5230 void MacroAssembler::load_method_holder_cld(Register rresult, Register rmethod) {
5231   load_method_holder(rresult, rmethod);
5232   movptr(rresult, Address(rresult, InstanceKlass::class_loader_data_offset()));
5233 }
5234 
5235 void MacroAssembler::load_method_holder(Register holder, Register method) {
5236   movptr(holder, Address(method, Method::const_offset()));                      // ConstMethod*
5237   movptr(holder, Address(holder, ConstMethod::constants_offset()));             // ConstantPool*
5238   movptr(holder, Address(holder, ConstantPool::pool_holder_offset()));          // InstanceKlass*
5239 }
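Taken together, the three routines above walk Method* -> ConstMethod* -> ConstantPool* -> InstanceKlass* and then load the holder's java_mirror field; because the mirror is kept as an OopHandle (a pointer to an oop slot rather than the oop itself), resolve_oop_handle adds one more dereference, with a load barrier where the GC requires it. A hedged C-level outline of the chain, using invented stand-in types in place of the real byte offsets:

    #include <cstdint>

    // Invented stand-in types; the real code works purely with byte offsets.
    struct OopDesc;                // an object reference ("oop")
    struct InstanceKlassSketch { OopDesc** java_mirror_handle; };
    struct ConstantPoolSketch  { InstanceKlassSketch* pool_holder; };
    struct ConstMethodSketch   { ConstantPoolSketch* constants; };
    struct MethodSketch        { ConstMethodSketch* const_method; };

    OopDesc* load_mirror_sketch(MethodSketch* m) {
      InstanceKlassSketch* holder = m->const_method->constants->pool_holder;
      return *holder->java_mirror_handle;   // resolve the OopHandle: one extra dereference
    }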
5240 
5241 #ifdef _LP64
5242 void MacroAssembler::load_nklass_compact(Register dst, Register src) {
5243   assert(UseCompactObjectHeaders, "expect compact object headers");
5244 
5245   Label fast;
5246   movq(dst, Address(src, oopDesc::mark_offset_in_bytes()));
5247   testb(dst, markWord::monitor_value);
5248   jccb(Assembler::zero, fast);
5249 
5250   // Fetch displaced header
5251   movq(dst, Address(dst, OM_OFFSET_NO_MONITOR_VALUE_TAG(header)));
5252 
5253   bind(fast);
5254   shrq(dst, markWord::klass_shift);
5255 }
5256 #endif
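With compact object headers the narrow class pointer is stored in the upper bits of the mark word, and an inflated object keeps its original header displaced in its ObjectMonitor, which is why the monitor bit is tested first. A hedged C-level mirror of the routine above; the struct, its field name and the shift value are illustrative stand-ins, not the VM's definitions:

    #include <cstdint>

    struct MonitorSketch { uint64_t displaced_header; };  // stand-in for ObjectMonitor

    constexpr uint64_t monitor_value = 0b10;  // assumed markWord::monitor_value
    constexpr unsigned klass_shift   = 42;    // illustrative; the VM uses markWord::klass_shift

    uint32_t narrow_klass_of(uint64_t mark) {
      if (mark & monitor_value) {
        // Inflated: the mark word is a tagged ObjectMonitor*, follow it to the displaced header.
        auto* m = reinterpret_cast<MonitorSketch*>(mark - monitor_value);
        mark = m->displaced_header;
      }
      return static_cast<uint32_t>(mark >> klass_shift);
    }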
5257 
5258 void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
5259   assert_different_registers(src, tmp);
5260   assert_different_registers(dst, tmp);
5261 #ifdef _LP64
5262   if (UseCompactObjectHeaders) {
5263     load_nklass_compact(dst, src);
5264     decode_klass_not_null(dst, tmp);
5265   } else if (UseCompressedClassPointers) {
5266     movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));
5267     decode_klass_not_null(dst, tmp);
5268   } else
5269 #endif
5270   {
5271     movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
5272   }
5273 }
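In both compressed branches the 32-bit narrow value still has to become a real Klass*; decode_klass_not_null adds the compressed-class-space base and undoes the encoding shift. A minimal sketch, with base and shift taken as assumed inputs (the VM reads them from CompressedKlassPointers):

    #include <cstdint>

    // Assumed encoding parameters; illustrative only.
    struct NarrowKlassEncoding {
      uintptr_t base;   // start of the compressed class space
      unsigned  shift;  // 0, or the log2 of the klass alignment
    };

    inline void* decode_klass_not_null(uint32_t narrow, const NarrowKlassEncoding& enc) {
      return reinterpret_cast<void*>(enc.base + (uintptr_t(narrow) << enc.shift));
    }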
5274 
5275 void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
5276   assert(!UseCompactObjectHeaders, "not with compact headers");
5277   assert_different_registers(src, tmp);
5278   assert_different_registers(dst, tmp);
5279 #ifdef _LP64
5280   if (UseCompressedClassPointers) {
5281     encode_klass_not_null(src, tmp);
5282     movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
5283   } else
5284 #endif
5285     movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
5286 }
5287 
5288 void MacroAssembler::cmp_klass(Register klass, Register obj, Register tmp) {
5289 #ifdef _LP64
5290   if (UseCompactObjectHeaders) {
5291     load_nklass_compact(tmp, obj);
5292     cmpl(klass, tmp);
5293   } else if (UseCompressedClassPointers) {
5294     cmpl(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
5295   } else
5296 #endif
5297   {
5298     cmpptr(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
5299   }
5300 }
5301 
5302 void MacroAssembler::cmp_klass(Register src, Register dst, Register tmp1, Register tmp2) {
5303 #ifdef _LP64
5304   if (UseCompactObjectHeaders) {
5305     assert(tmp2 != noreg, "need tmp2");
5306     assert_different_registers(src, dst, tmp1, tmp2);
5307     load_nklass_compact(tmp1, src);
5308     load_nklass_compact(tmp2, dst);
5309     cmpl(tmp1, tmp2);
5310   } else if (UseCompressedClassPointers) {
5311     movl(tmp1, Address(src, oopDesc::klass_offset_in_bytes()));
5312     cmpl(tmp1, Address(dst, oopDesc::klass_offset_in_bytes()));
5313   } else
5314 #endif
5315   {
5316     movptr(tmp1, Address(src, oopDesc::klass_offset_in_bytes()));
5317     cmpptr(tmp1, Address(dst, oopDesc::klass_offset_in_bytes()));
5318   }
5319 }
5320 
5321 void MacroAssembler::access_load_at(BasicType type, DecoratorSet decorators, Register dst, Address src,
5322                                     Register tmp1, Register thread_tmp) {
5323   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5324   decorators = AccessInternal::decorator_fixup(decorators, type);
5325   bool as_raw = (decorators & AS_RAW) != 0;
5326   if (as_raw) {
5327     bs->BarrierSetAssembler::load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5328   } else {
5329     bs->load_at(this, decorators, type, dst, src, tmp1, thread_tmp);
5330   }
5331 }
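The bs->BarrierSetAssembler::load_at(...) spelling is the standard C++ way to force the base-class implementation: a call qualified with the class name is bound statically, so AS_RAW accesses always get the plain load, while normal accesses go through the virtual load_at and pick up whatever barriers the active GC's assembler subclass emits. A small self-contained illustration of just that dispatch idiom (the class names below are invented for the example):

    #include <cstdio>

    struct BaseAssembler {
      virtual ~BaseAssembler() = default;
      virtual void load_at() { std::puts("plain load, no barriers"); }
    };

    struct GCAssembler : BaseAssembler {
      void load_at() override { std::puts("GC barrier code around the load"); }
    };

    void access_load(BaseAssembler* bs, bool as_raw) {
      if (as_raw) {
        bs->BaseAssembler::load_at();  // qualified call: statically bound, always raw
      } else {
        bs->load_at();                 // virtual call: the active GC's override runs
      }
    }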
5332 
5333 void MacroAssembler::access_store_at(BasicType type, DecoratorSet decorators, Address dst, Register val,
5334                                      Register tmp1, Register tmp2, Register tmp3) {
5335   BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
5336   decorators = AccessInternal::decorator_fixup(decorators, type);
5337   bool as_raw = (decorators & AS_RAW) != 0;
5338   if (as_raw) {
5339     bs->BarrierSetAssembler::store_at(this, decorators, type, dst, val, tmp1, tmp2, tmp3);
5340   } else {

5348 }
5349 
5350 // Doesn't do verification, generates fixed size code
5351 void MacroAssembler::load_heap_oop_not_null(Register dst, Address src, Register tmp1,
5352                                             Register thread_tmp, DecoratorSet decorators) {
5353   access_load_at(T_OBJECT, IN_HEAP | IS_NOT_NULL | decorators, dst, src, tmp1, thread_tmp);
5354 }
5355 
5356 void MacroAssembler::store_heap_oop(Address dst, Register val, Register tmp1,
5357                                     Register tmp2, Register tmp3, DecoratorSet decorators) {
5358   access_store_at(T_OBJECT, IN_HEAP | decorators, dst, val, tmp1, tmp2, tmp3);
5359 }
5360 
5361 // Used for storing nulls.
5362 void MacroAssembler::store_heap_oop_null(Address dst) {
5363   access_store_at(T_OBJECT, IN_HEAP, dst, noreg, noreg, noreg, noreg);
5364 }
5365 
5366 #ifdef _LP64
5367 void MacroAssembler::store_klass_gap(Register dst, Register src) {
5368   assert(!UseCompactObjectHeaders, "Don't use with compact headers");
5369   if (UseCompressedClassPointers) {
5370     // Store to klass gap in destination
5371     movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
5372   }
5373 }
5374 
5375 #ifdef ASSERT
5376 void MacroAssembler::verify_heapbase(const char* msg) {
5377   assert (UseCompressedOops, "should be compressed");
5378   assert (Universe::heap() != nullptr, "java heap should be initialized");
5379   if (CheckCompressedOops) {
5380     Label ok;
5381     ExternalAddress src2(CompressedOops::ptrs_base_addr());
5382     const bool is_src2_reachable = reachable(src2);
5383     if (!is_src2_reachable) {
5384       push(rscratch1);  // cmpptr trashes rscratch1
5385     }
5386     cmpptr(r12_heapbase, src2, rscratch1);
5387     jcc(Assembler::equal, ok);
5388     STOP(msg);

9842 
9843 #endif // !WIN32 || _LP64
9844 
9845 void MacroAssembler::check_stack_alignment(Register sp, const char* msg, unsigned bias, Register tmp) {
9846   Label L_stack_ok;
9847   if (bias == 0) {
9848     testptr(sp, 2 * wordSize - 1);
9849   } else {
9850     // lea(tmp, Address(rsp, bias));
9851     mov(tmp, sp);
9852     addptr(tmp, bias);
9853     testptr(tmp, 2 * wordSize - 1);
9854   }
9855   jcc(Assembler::equal, L_stack_ok);
9856   block_comment(msg);
9857   stop(msg);
9858   bind(L_stack_ok);
9859 }
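The testptr against 2 * wordSize - 1 asserts that the (optionally biased) stack pointer is a multiple of two machine words, i.e. 16 bytes on x86_64. The same predicate in plain C++, with the word size hard-coded as an LP64 assumption:

    #include <cstdint>

    inline bool stack_aligned(uintptr_t sp, unsigned bias) {
      const uintptr_t word_size = 8;                    // assumption: 64-bit build
      return ((sp + bias) & (2 * word_size - 1)) == 0;  // low four bits must be zero
    }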
9860 
9861 // Implements lightweight-locking.


9862 //
9863 // obj: the object to be locked
9864 // reg_rax: rax
9865 // thread: the thread which attempts to lock obj
9866 // tmp: a temporary register
9867 void MacroAssembler::lightweight_lock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow) {
9868   assert(reg_rax == rax, "");
9869   assert_different_registers(obj, reg_rax, thread, tmp);
9870 
9871   Label push;
9872   const Register top = tmp;
9873 
9874   // Preload the markWord. It is important that this is the first
9875   // instruction emitted as it is part of C1's null check semantics.
9876   movptr(reg_rax, Address(obj, oopDesc::mark_offset_in_bytes()));
9877 
9878   // Load top.
9879   movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
9880 
9881   // Check if the lock-stack is full.
9882   cmpl(top, LockStack::end_offset());
9883   jcc(Assembler::greaterEqual, slow);
9884 
9885   // Check for recursion.
9886   cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
9887   jcc(Assembler::equal, push);
9888 
9889   // Check header for monitor (0b10).
9890   testptr(reg_rax, markWord::monitor_value);
9891   jcc(Assembler::notZero, slow);
9892 
9893   // Try to lock. Transition lock bits 0b01 => 0b00
9894   movptr(tmp, reg_rax);
9895   andptr(tmp, ~(int32_t)markWord::unlocked_value);
9896   orptr(reg_rax, markWord::unlocked_value);
9897   lock(); cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
9898   jcc(Assembler::notEqual, slow);
9899 
9900   // Restore top, CAS clobbers register.
9901   movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
9902 
9903   bind(push);
9904   // After successful lock, push object on lock-stack.
9905   movptr(Address(thread, top), obj);
9906   incrementl(top, oopSize);
9907   movl(Address(thread, JavaThread::lock_stack_top_offset()), top);
9908 }
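Compared with the old version above, the rewritten fast path also recognizes recursion by looking at the top of the per-thread lock-stack: if the object is already there it is pushed again without any CAS. A hedged sketch of that bookkeeping; the capacity and the element-indexed top are illustrative (the real LockStack tracks a byte offset bounded by LockStack::end_offset()):

    #include <cstddef>

    struct LockStackSketch {
      static constexpr int capacity = 8;  // illustrative size
      void* base[capacity];
      int   top = 0;

      bool full() const             { return top >= capacity; }
      bool recursive(void* o) const { return top > 0 && base[top - 1] == o; }
      void push(void* o)            { base[top++] = o; }
    };

    // Decision order mirroring the assembler above; try_cas stands in for the
    // monitor check plus the 0b01 -> 0b00 mark-word transition.
    inline bool fast_lock(LockStackSketch& ls, void* obj, bool (*try_cas)(void*)) {
      if (ls.full())           return false;                 // branch to slow
      if (ls.recursive(obj)) { ls.push(obj); return true; }  // recursive: just push again
      if (!try_cas(obj))       return false;                 // inflated or contended: slow
      ls.push(obj);            return true;
    }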
9909 
9910 // Implements lightweight-unlocking.


9911 //
9912 // obj: the object to be unlocked
9913 // reg_rax: rax
9914 // thread: the thread
9915 // tmp: a temporary register
9916 //
9917 // x86_32 Note: reg_rax and thread may alias each other due to limited register
9918 //              availability.
9919 void MacroAssembler::lightweight_unlock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow) {
9920   assert(reg_rax == rax, "");
9921   assert_different_registers(obj, reg_rax, tmp);
9922   LP64_ONLY(assert_different_registers(obj, reg_rax, thread, tmp);)
9923 
9924   Label unlocked, push_and_slow;
9925   const Register top = tmp;
9926 
9927   // Check if obj is top of lock-stack.
9928   movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
9929   cmpptr(obj, Address(thread, top, Address::times_1, -oopSize));
9930   jcc(Assembler::notEqual, slow);
9931 
9932   // Pop lock-stack.
9933   DEBUG_ONLY(movptr(Address(thread, top, Address::times_1, -oopSize), 0);)
9934   subl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
9935 
9936   // Check if recursive.
9937   cmpptr(obj, Address(thread, top, Address::times_1, -2 * oopSize));
9938   jcc(Assembler::equal, unlocked);
9939 
9940   // Not recursive. Check header for monitor (0b10).
9941   movptr(reg_rax, Address(obj, oopDesc::mark_offset_in_bytes()));
9942   testptr(reg_rax, markWord::monitor_value);
9943   jcc(Assembler::notZero, push_and_slow);
9944 
9945 #ifdef ASSERT
9946   // Check header not unlocked (0b01).
9947   Label not_unlocked;
9948   testptr(reg_rax, markWord::unlocked_value);
9949   jcc(Assembler::zero, not_unlocked);
9950   stop("lightweight_unlock already unlocked");
9951   bind(not_unlocked);
9952 #endif
9953 
9954   // Try to unlock. Transition lock bits 0b00 => 0b01
9955   movptr(tmp, reg_rax);
9956   orptr(tmp, markWord::unlocked_value);
9957   lock(); cmpxchgptr(tmp, Address(obj, oopDesc::mark_offset_in_bytes()));
9958   jcc(Assembler::equal, unlocked);
9959 
9960   bind(push_and_slow);
9961   // Restore lock-stack and handle the unlock in runtime.
9962   if (thread == reg_rax) {
9963     // On x86_32 we may lose the thread.
9964     get_thread(thread);
9965   }
9966 #ifdef ASSERT
9967   movl(top, Address(thread, JavaThread::lock_stack_top_offset()));
9968   movptr(Address(thread, top), obj);
9969 #endif
9970   addl(Address(thread, JavaThread::lock_stack_top_offset()), oopSize);
9971   jmp(slow);
9972 
9973   bind(unlocked);
9974 }