
src/hotspot/cpu/ppc/c1_MacroAssembler_ppc.cpp


116     // hence we are done.
117   } else {
118     assert(false, "Unhandled LockingMode:%d", LockingMode);
119   }
120   b(done);
121 
122   bind(slow_int);
123   b(slow_case); // far
124 
125   if (LockingMode == LM_LEGACY) {
126     bind(cas_failed);
127     // We did not find an unlocked object, so see if this is a recursive case.
128     sub(Rscratch, Rscratch, R1_SP);
129     load_const_optimized(R0, (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
130     and_(R0/*==0?*/, Rscratch, R0);
131     std(R0/*==0, perhaps*/, BasicLock::displaced_header_offset_in_bytes(), Rbox);
132     bne(CCR0, slow_int);
133   }
134 
135   bind(done);
136   inc_held_monitor_count(Rmark /*tmp*/);
137 }
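
The sub/and_/bne sequence under cas_failed above is the classic stack-lock recursion test: the CAS failed, so the markWord may already point at a BasicLock on the current thread's stack. The sketch below models the same arithmetic in plain host C++; it is not the VM's code, and the 4 KiB page size, the 2-bit lock mask and the function name are illustrative stand-ins for os::vm_page_size(), markWord::lock_mask_in_place and the generated instructions.

#include <cstdint>

// After the failed CAS, 'mark' is the value found in the object's markWord and
// 'sp' is the current stack pointer (R1_SP). Treat the lock as a recursive
// stack lock iff 'mark' lies within one page above SP and the lock bits are clear.
bool is_recursive_stack_lock(uintptr_t mark, uintptr_t sp) {
  const uintptr_t page_size          = 4096;   // stand-in for os::vm_page_size()
  const uintptr_t lock_mask_in_place = 0x3;    // stand-in for markWord::lock_mask_in_place
  // Same mask that load_const_optimized() materializes in R0 above.
  const uintptr_t mask = ~(page_size - 1) | lock_mask_in_place;
  // Mirrors sub(Rscratch, Rscratch, R1_SP); and_(R0, Rscratch, R0); bne(CCR0, slow_int);
  return ((mark - sp) & mask) == 0;
}

When the AND result is zero, the std above has already written that zero into the BasicLock's displaced header, which is how the recursive enter is recorded; a non-zero result branches to slow_int.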
138 
139 
140 void C1_MacroAssembler::unlock_object(Register Rmark, Register Roop, Register Rbox, Label& slow_case) {
141   assert_different_registers(Rmark, Roop, Rbox);
142 
143   Label slow_int, done;
144 
145   Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
146   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
147 
148   if (LockingMode != LM_LIGHTWEIGHT) {
149     // Test first if it is a fast recursive unlock.
150     ld(Rmark, BasicLock::displaced_header_offset_in_bytes(), Rbox);
151     cmpdi(CCR0, Rmark, 0);
152     beq(CCR0, done);
153   }
154 
155   // Load object.
156   ld(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);

162     // Check if it is still a lightweight lock; this is true if we see
163     // the stack address of the BasicLock in the markWord of the object.
164     cmpxchgd(/*flag=*/CCR0,
165              /*current_value=*/R0,
166              /*compare_value=*/Rbox,
167              /*exchange_value=*/Rmark,
168              /*where=*/Roop,
169              MacroAssembler::MemBarRel,
170              MacroAssembler::cmpxchgx_hint_release_lock(),
171              noreg,
172              &slow_int);
173   } else {
174     assert(false, "Unhandled LockingMode:%d", LockingMode);
175   }
176   b(done);
177   bind(slow_int);
178   b(slow_case); // far
179 
180   // Done
181   bind(done);
182   dec_held_monitor_count(Rmark /*tmp*/);
183 }
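
The legacy unlock fast path above has two cases: a zero displaced header in the BasicLock means the matching enter was recursive and there is nothing to restore; otherwise the cmpxchgd with MemBarRel swings the object's markWord from the BasicLock address back to the saved header. The following is a rough host-C++ model of that logic, not the VM's code; std::atomic stands in for the markWord, and the function name and signature are made up for illustration.

#include <atomic>
#include <cstdint>

// 'mark_word' stands in for the object's markWord, 'box' for the address of the
// on-stack BasicLock, 'displaced' for the header it saved at lock time.
bool legacy_unlock(std::atomic<uintptr_t>& mark_word,
                   uintptr_t box, uintptr_t displaced) {
  if (displaced == 0) {
    return true;               // fast recursive unlock: nothing was displaced
  }
  uintptr_t expected = box;    // the object must still point at our BasicLock
  // Release ordering mirrors MacroAssembler::MemBarRel on the cmpxchgd; a false
  // return corresponds to the slow_int -> slow_case branch above.
  return mark_word.compare_exchange_strong(expected, displaced,
                                           std::memory_order_release,
                                           std::memory_order_relaxed);
}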
184 
185 
186 void C1_MacroAssembler::try_allocate(
187   Register obj,                        // result: pointer to object after successful allocation
188   Register var_size_in_bytes,          // object size in bytes if unknown at compile time; invalid otherwise
189   int      con_size_in_bytes,          // object size in bytes if   known at compile time
190   Register t1,                         // temp register
191   Register t2,                         // temp register
192   Label&   slow_case                   // continuation point if fast allocation fails
193 ) {
194   if (UseTLAB) {
195     tlab_allocate(obj, var_size_in_bytes, con_size_in_bytes, t1, slow_case);
196   } else {
197     b(slow_case);
198   }
199 }
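
try_allocate defers the fast path entirely to tlab_allocate and otherwise branches to slow_case. As a rough sketch of what that fast path amounts to, a thread-local bump-pointer allocation that bails out to the slow path when the buffer cannot satisfy the request, here is a small host-C++ model; the Tlab struct with its top/end fields is an assumption for illustration and not the VM's ThreadLocalAllocBuffer layout.

#include <cstddef>
#include <cstdint>

struct Tlab {
  uintptr_t top;   // current allocation pointer
  uintptr_t end;   // first byte past the thread-local buffer
};

// Returns the new object's address, or 0 to signal the slow_case continuation.
uintptr_t try_allocate_sketch(Tlab& tlab, size_t size_in_bytes, bool use_tlab) {
  if (!use_tlab) {
    return 0;                              // corresponds to b(slow_case)
  }
  uintptr_t obj = tlab.top;
  if (tlab.end - obj < size_in_bytes) {
    return 0;                              // TLAB exhausted: tlab_allocate takes slow_case
  }
  tlab.top = obj + size_in_bytes;          // bump; the TLAB is owned by a single
  return obj;                              // thread, so no atomics are needed
}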
200 
201 
202 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {

116     // hence we are done.
117   } else {
118     assert(false, "Unhandled LockingMode:%d", LockingMode);
119   }
120   b(done);
121 
122   bind(slow_int);
123   b(slow_case); // far
124 
125   if (LockingMode == LM_LEGACY) {
126     bind(cas_failed);
127     // We did not find an unlocked object, so see if this is a recursive case.
128     sub(Rscratch, Rscratch, R1_SP);
129     load_const_optimized(R0, (~(os::vm_page_size()-1) | markWord::lock_mask_in_place));
130     and_(R0/*==0?*/, Rscratch, R0);
131     std(R0/*==0, perhaps*/, BasicLock::displaced_header_offset_in_bytes(), Rbox);
132     bne(CCR0, slow_int);
133   }
134 
135   bind(done);
136   if (LockingMode == LM_LEGACY) {
137     inc_held_monitor_count(Rmark /*tmp*/);
138   }
139 }
140 
141 
142 void C1_MacroAssembler::unlock_object(Register Rmark, Register Roop, Register Rbox, Label& slow_case) {
143   assert_different_registers(Rmark, Roop, Rbox);
144 
145   Label slow_int, done;
146 
147   Address mark_addr(Roop, oopDesc::mark_offset_in_bytes());
148   assert(mark_addr.disp() == 0, "cas must take a zero displacement");
149 
150   if (LockingMode != LM_LIGHTWEIGHT) {
151     // Test first if it is a fast recursive unlock.
152     ld(Rmark, BasicLock::displaced_header_offset_in_bytes(), Rbox);
153     cmpdi(CCR0, Rmark, 0);
154     beq(CCR0, done);
155   }
156 
157   // Load object.
158   ld(Roop, in_bytes(BasicObjectLock::obj_offset()), Rbox);

164     // Check if it is still a lightweight lock; this is true if we see
165     // the stack address of the BasicLock in the markWord of the object.
166     cmpxchgd(/*flag=*/CCR0,
167              /*current_value=*/R0,
168              /*compare_value=*/Rbox,
169              /*exchange_value=*/Rmark,
170              /*where=*/Roop,
171              MacroAssembler::MemBarRel,
172              MacroAssembler::cmpxchgx_hint_release_lock(),
173              noreg,
174              &slow_int);
175   } else {
176     assert(false, "Unhandled LockingMode:%d", LockingMode);
177   }
178   b(done);
179   bind(slow_int);
180   b(slow_case); // far
181 
182   // Done
183   bind(done);
184   if (LockingMode == LM_LEGACY) {
185     dec_held_monitor_count(Rmark /*tmp*/);
186   }
187 }
188 
189 
190 void C1_MacroAssembler::try_allocate(
191   Register obj,                        // result: pointer to object after successful allocation
192   Register var_size_in_bytes,          // object size in bytes if unknown at compile time; invalid otherwise
193   int      con_size_in_bytes,          // object size in bytes if   known at compile time
194   Register t1,                         // temp register
195   Register t2,                         // temp register
196   Label&   slow_case                   // continuation point if fast allocation fails
197 ) {
198   if (UseTLAB) {
199     tlab_allocate(obj, var_size_in_bytes, con_size_in_bytes, t1, slow_case);
200   } else {
201     b(slow_case);
202   }
203 }
204 
205 
206 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {