  }
}

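// Prints the oop map in a human-readable form: each live oop slot is listed
// by its index, and dead slots are marked with a trailing '+'.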
void InterpreterOopMap::print() const {
  int n = number_of_entries();
  tty->print("oop map for ");
  method()->print_value();
  tty->print(" @ %d = [%d] { ", bci(), n);
  for (int i = 0; i < n; i++) {
    if (is_dead(i)) {
      tty->print("%d+ ", i);
    } else if (is_oop(i)) {
      tty->print("%d ", i);
    }
  }
  tty->print_cr("}");
}

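// Helper that walks a native method's signature and sets the oop bit in a
// bit mask for every parameter slot that holds an object reference; primitive
// parameters are skipped. It also counts the oop parameters it marks.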
class MaskFillerForNative: public NativeSignatureIterator {
 private:
  uintptr_t* _mask;     // the bit mask to be filled
  int        _size;     // the mask size in bits
  int        _num_oops; // number of oop parameters seen so far

  void set_one(int i) {
    _num_oops++;
    // Each entry occupies bits_per_entry bits; set the oop bit of entry i.
    i *= InterpreterOopMap::bits_per_entry;
    assert(0 <= i && i < _size, "offset out of bounds");
    _mask[i / BitsPerWord] |= (((uintptr_t) 1 << InterpreterOopMap::oop_bit_number) << (i % BitsPerWord));
  }

 public:
  // Primitive parameters contribute no oop bits.
  void pass_byte()   { /* ignore */ }
  void pass_short()  { /* ignore */ }
  void pass_int()    { /* ignore */ }
  void pass_long()   { /* ignore */ }
  void pass_float()  { /* ignore */ }
  void pass_double() { /* ignore */ }
  void pass_object() { set_one(offset()); }

  MaskFillerForNative(const methodHandle& method, uintptr_t* mask, int size) : NativeSignatureIterator(method) {
    _mask = mask;
    _size = size;
    _num_oops = 0;
    // initialize with 0
    int i = (size + BitsPerWord - 1) / BitsPerWord;
    while (i-- > 0) _mask[i] = 0;
  }

  void generate() {
    iterate();
  }

  int num_oops() { return _num_oops; }
};

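// Cross-checks this entry's bit mask against the vars/stack cell states
// produced by the oop map generator; returns false on a mismatch.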
bool OopMapCacheEntry::verify_mask(CellTypeState* vars, CellTypeState* stack, int max_locals, int stack_top) {
  // Check mask includes map
  VerifyClosure blk(this);
  iterate_oop(&blk);
  if (blk.failed()) return false;

  // Check if map is generated correctly
  // (Use ?: operator to make sure all 'true' & 'false' are represented exactly the same so we can use == afterwards)
  const bool log = log_is_enabled(Trace, interpreter, oopmap);
  LogStream st(Log(interpreter, oopmap)::trace());

  if (log) st.print("Locals (%d): ", max_locals);
  for (int i = 0; i < max_locals; i++) {
    bool v1 = is_oop(i)              ? true : false;
    bool v2 = vars[i].is_reference() ? true : false;
    assert(v1 == v2, "locals oop mask generation error");
    if (log) st.print("%d", v1 ? 1 : 0);
  }
  if (log) st.cr();

  // The expression stack is checked the same way; its entries follow the
  // locals in the mask.
  if (log) st.print("Stack (%d): ", stack_top);
  for (int j = 0; j < stack_top; j++) {
    bool v1 = is_oop(max_locals + j)  ? true : false;
    bool v2 = stack[j].is_reference() ? true : false;
    assert(v1 == v2, "stack oop mask generation error");
    if (log) st.print("%d", v1 ? 1 : 0);
  }
  if (log) st.cr();
  return true;
}

// Allocates the bit mask on the C heap when it is too large to be stored
// inline in the entry.
void OopMapCacheEntry::allocate_bit_mask() {
  if (mask_size() > small_mask_limit) {
    assert(_bit_mask[0] == 0, "bit mask should be new or just flushed");
    _bit_mask[0] = (intptr_t)
      NEW_C_HEAP_ARRAY(uintptr_t, mask_word_size(), mtClass);
  }
}

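// Releases a heap-allocated bit mask; small masks are stored inline in
// _bit_mask and need no deallocation.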
void OopMapCacheEntry::deallocate_bit_mask() {
  if (mask_size() > small_mask_limit && _bit_mask[0] != 0) {
    assert(!Thread::current()->resource_area()->contains((void*)_bit_mask[0]),
           "This bit mask should not be in the resource area");
    FREE_C_HEAP_ARRAY(uintptr_t, _bit_mask[0]);
    debug_only(_bit_mask[0] = 0;)
  }
}


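// Native methods have no bytecodes, so the oop map is derived from the
// method signature alone: one mask entry per parameter slot.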
void OopMapCacheEntry::fill_for_native(const methodHandle& mh) {
  assert(mh->is_native(), "method must be native method");
  set_mask_size(mh->size_of_parameters() * bits_per_entry);
  allocate_bit_mask();
  // fill mask for parameters
  MaskFillerForNative mf(mh, bit_mask(), mask_size());
  mf.generate();
  _num_oops = mf.num_oops();
}


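// Computes and stores the oop map for the given method and bytecode index,
// replacing any previous contents of this entry.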
void OopMapCacheEntry::fill(const methodHandle& method, int bci) {
  // Flush entry to deallocate an existing entry
  flush();
  set_method(method());
  set_bci(checked_cast<unsigned short>(bci)); // bci is always u2
  if (method->is_native()) {
    // Native method activations have oops only among the parameters and one
    // extra oop following the parameters (the mirror for static native methods).
    fill_for_native(method);
  } else {
    OopMapForCacheEntry gen(method, bci, this);
    if (!gen.compute_map(Thread::current())) {
      fatal("Unrecoverable verification or out-of-memory error");
    }
  }
}