
src/hotspot/share/runtime/vframe.cpp

  72     if (cb->is_compiled()) {
  73       CompiledMethod* nm = (CompiledMethod*)cb;
  74       return new compiledVFrame(f, reg_map, thread, nm);
  75     }
  76 
  77     if (f->is_runtime_frame()) {
  78       // Skip this frame and try again.
  79       RegisterMap temp_map = *reg_map;
  80       frame s = f->sender(&temp_map);
  81       return new_vframe(&s, &temp_map, thread);
  82     }
  83   }
  84 
  85   // External frame
  86   return new externalVFrame(f, reg_map, thread);
  87 }
  88 
  89 vframe* vframe::sender() const {
  90   RegisterMap temp_map = *register_map();
  91   assert(is_top(), "just checking");

  92   if (_fr.is_entry_frame() && _fr.is_first_frame()) return NULL;
  93   frame s = _fr.real_sender(&temp_map);
  94   if (s.is_first_frame()) return NULL;
  95   return vframe::new_vframe(&s, &temp_map, thread());
  96 }
  97 
  98 vframe* vframe::top() const {
  99   vframe* vf = (vframe*) this;
 100   while (!vf->is_top()) vf = vf->sender();
 101   return vf;
 102 }
 103 
 104 
 105 javaVFrame* vframe::java_sender() const {
 106   vframe* f = sender();
 107   while (f != NULL) {
 108     if (f->is_java_frame()) return javaVFrame::cast(f);
 109     f = f->sender();
 110   }
 111   return NULL;


 240                 !mark->monitor()->is_entered(thread())
 241               )) {
 242             lock_state = "waiting to lock";
 243           } else {
  244             // We own the monitor, which is not as interesting, so
  245             // disable the extra printing below.
 246             mark = NULL;
 247           }
 248         }
 249         print_locked_object_class_name(st, Handle(THREAD, monitor->owner()), lock_state);
 250 
 251         found_first_monitor = true;
 252       }
 253     }
 254   }
 255 }
 256 
 257 // ------------- interpretedVFrame --------------
 258 
 259 u_char* interpretedVFrame::bcp() const {
 260   return fr().interpreter_frame_bcp();
 261 }
 262 
 263 void interpretedVFrame::set_bcp(u_char* bcp) {

 264   fr().interpreter_frame_set_bcp(bcp);
 265 }
 266 
 267 intptr_t* interpretedVFrame::locals_addr_at(int offset) const {

 268   assert(fr().is_interpreted_frame(), "frame should be an interpreted frame");
 269   return fr().interpreter_frame_local_at(offset);
 270 }
 271 
 272 
 273 GrowableArray<MonitorInfo*>* interpretedVFrame::monitors() const {
 274   GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(5);
 275   for (BasicObjectLock* current = (fr().previous_monitor_in_interpreter_frame(fr().interpreter_frame_monitor_begin()));
 276        current >= fr().interpreter_frame_monitor_end();
 277        current = fr().previous_monitor_in_interpreter_frame(current)) {
 278     result->push(new MonitorInfo(current->obj(), current->lock(), false, false));


 279   }
 280   return result;
 281 }
 282 
 283 int interpretedVFrame::bci() const {
 284   return method()->bci_from(bcp());
 285 }
 286 
 287 Method* interpretedVFrame::method() const {
 288   return fr().interpreter_frame_method();
 289 }
 290 
 291 static StackValue* create_stack_value_from_oop_map(const InterpreterOopMap& oop_mask,

 292                                                    int index,
 293                                                    const intptr_t* const addr) {
 294 
 295   assert(index >= 0 &&
 296          index < oop_mask.number_of_entries(), "invariant");
 297 
 298   // categorize using oop_mask
 299   if (oop_mask.is_oop(index)) {
 300     // reference (oop) "r"
 301     Handle h(Thread::current(), addr != NULL ? (*(oop*)addr) : (oop)NULL);
 302     return new StackValue(h);
 303   }
 304   // value (integer) "v"
 305   return new StackValue(addr != NULL ? *addr : 0);
 306 }
 307 
 308 static bool is_in_expression_stack(const frame& fr, const intptr_t* const addr) {
 309   assert(addr != NULL, "invariant");
 310 
  311   // Ensure the address is 'inside' the expression stack (i.e., addr >= sp for Intel).
 312   // In case of exceptions, the expression stack is invalid and the sp
 313   // will be reset to express this condition.
 314   if (frame::interpreter_frame_expression_stack_direction() > 0) {
 315     return addr <= fr.interpreter_frame_tos_address();
 316   }
 317 
 318   return addr >= fr.interpreter_frame_tos_address();
 319 }
 320 
 321 static void stack_locals(StackValueCollection* result,
 322                          int length,
 323                          const InterpreterOopMap& oop_mask,
 324                          const frame& fr) {

 325 
 326   assert(result != NULL, "invariant");
 327 
 328   for (int i = 0; i < length; ++i) {
 329     const intptr_t* const addr = fr.interpreter_frame_local_at(i);
 330     assert(addr != NULL, "invariant");
 331     assert(addr >= fr.sp(), "must be inside the frame");
 332 
 333     StackValue* const sv = create_stack_value_from_oop_map(oop_mask, i, addr);
 334     assert(sv != NULL, "sanity check");
 335 
 336     result->add(sv);
 337   }
 338 }
 339 
 340 static void stack_expressions(StackValueCollection* result,
 341                               int length,
 342                               int max_locals,
 343                               const InterpreterOopMap& oop_mask,
 344                               const frame& fr) {

 345 
 346   assert(result != NULL, "invariant");
 347 
 348   for (int i = 0; i < length; ++i) {
 349     const intptr_t* addr = fr.interpreter_frame_expression_stack_at(i);
 350     assert(addr != NULL, "invariant");
 351     if (!is_in_expression_stack(fr, addr)) {
 352       // Need to ensure no bogus escapes.
 353       addr = NULL;
 354     }
 355 
 356     StackValue* const sv = create_stack_value_from_oop_map(oop_mask,

 357                                                            i + max_locals,
 358                                                            addr);
 359     assert(sv != NULL, "sanity check");
 360 
 361     result->add(sv);
 362   }
 363 }
 364 
 365 StackValueCollection* interpretedVFrame::locals() const {
 366   return stack_data(false);
 367 }
 368 
 369 StackValueCollection* interpretedVFrame::expressions() const {
 370   return stack_data(true);
 371 }
 372 
 373 /*
 374  * Worker routine for fetching references and/or values
 375  * for a particular bci in the interpretedVFrame.
 376  *


 387   method()->mask_for(bci(), &oop_mask);
 388   const int mask_len = oop_mask.number_of_entries();
 389 
 390   // If the method is native, method()->max_locals() is not telling the truth.
 391   // For our purposes, max locals instead equals the size of parameters.
 392   const int max_locals = method()->is_native() ?
 393     method()->size_of_parameters() : method()->max_locals();
 394 
 395   assert(mask_len >= max_locals, "invariant");
 396 
 397   const int length = expressions ? mask_len - max_locals : max_locals;
 398   assert(length >= 0, "invariant");
 399 
 400   StackValueCollection* const result = new StackValueCollection(length);
 401 
 402   if (0 == length) {
 403     return result;
 404   }
 405 
 406   if (expressions) {
 407     stack_expressions(result, length, max_locals, oop_mask, fr());
 408   } else {
 409     stack_locals(result, length, oop_mask, fr());
 410   }
 411 
 412   assert(length == result->size(), "invariant");
 413 
 414   return result;
 415 }
 416 
 417 void interpretedVFrame::set_locals(StackValueCollection* values) const {
 418   if (values == NULL || values->size() == 0) return;
 419 
  420   // If the method is native, max_locals is not telling the truth.
  421   // max_locals then equals the size of parameters.
 422   const int max_locals = method()->is_native() ?
 423     method()->size_of_parameters() : method()->max_locals();
 424 
 425   assert(max_locals == values->size(), "Mismatch between actual stack format and supplied data");
 426 
 427   // handle locals
 428   for (int i = 0; i < max_locals; i++) {
 429     // Find stack location


 440   }
 441 }
 442 
 443 // ------------- cChunk --------------
 444 
 445 entryVFrame::entryVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread)
 446 : externalVFrame(fr, reg_map, thread) {}
 447 
 448 #ifdef ASSERT
 449 void vframeStreamCommon::found_bad_method_frame() const {
 450   // 6379830 Cut point for an assertion that occasionally fires when
 451   // we are using the performance analyzer.
 452   // Disable this assert when testing the analyzer with fastdebug.
 453   // -XX:SuppressErrorAt=vframe.cpp:XXX (XXX=following line number)
 454   fatal("invalid bci or invalid scope desc");
 455 }
 456 #endif
 457 
 458 // top-frame will be skipped
 459 vframeStream::vframeStream(JavaThread* thread, frame top_frame,
 460   bool stop_at_java_call_stub) : vframeStreamCommon(thread) {
 461   _stop_at_java_call_stub = stop_at_java_call_stub;
 462 
 463   // skip top frame, as it may not be at safepoint
 464   _prev_frame = top_frame;
 465   _frame  = top_frame.sender(&_reg_map);
 466   while (!fill_from_frame()) {
 467     _prev_frame = _frame;
 468     _frame = _frame.sender(&_reg_map);
 469   }
 470 }
 471 
 472 
 473 // Step back n frames, skip any pseudo frames in between.
 474 // This function is used in Class.forName, Class.newInstance, Method.Invoke,
 475 // AccessController.doPrivileged.
 476 void vframeStreamCommon::security_get_caller_frame(int depth) {
 477   assert(depth >= 0, "invalid depth: %d", depth);
 478   for (int n = 0; !at_end(); security_next()) {
 479     if (!method()->is_ignored_by_security_stack_walk()) {
 480       if (n == depth) {
 481         // We have reached the desired depth; return.
 482         return;
 483       }
 484       n++;  // this is a non-skipped frame; count it against the depth
 485     }
 486   }
 487   // NOTE: At this point there were not enough frames on the stack
 488   // to walk to depth.  Callers of this method have to check for at_end.
 489 }
 490 
 491 


 651 
 652 void javaVFrame::print_value() const {
 653   Method*    m = method();
 654   InstanceKlass*     k = m->method_holder();
 655   tty->print_cr("frame( sp=" INTPTR_FORMAT ", unextended_sp=" INTPTR_FORMAT ", fp=" INTPTR_FORMAT ", pc=" INTPTR_FORMAT ")",
 656                 p2i(_fr.sp()),  p2i(_fr.unextended_sp()), p2i(_fr.fp()), p2i(_fr.pc()));
 657   tty->print("%s.%s", k->internal_name(), m->name()->as_C_string());
 658 
 659   if (!m->is_native()) {
 660     Symbol*  source_name = k->source_file_name();
 661     int        line_number = m->line_number_from_bci(bci());
 662     if (source_name != NULL && (line_number != -1)) {
 663       tty->print("(%s:%d)", source_name->as_C_string(), line_number);
 664     }
 665   } else {
 666     tty->print("(Native Method)");
 667   }
 668   // Check frame size and print warning if it looks suspiciously large
 669   if (fr().sp() != NULL) {
 670     RegisterMap map = *register_map();
 671     uint size = fr().frame_size(&map);


 672 #ifdef _LP64
 673     if (size > 8*K) warning("SUSPICIOUSLY LARGE FRAME (%d)", size);
 674 #else
 675     if (size > 4*K) warning("SUSPICIOUSLY LARGE FRAME (%d)", size);
 676 #endif
 677   }
 678 }
 679 
 680 
 681 bool javaVFrame::structural_compare(javaVFrame* other) {
 682   // Check static part
 683   if (method() != other->method()) return false;
 684   if (bci()    != other->bci())    return false;
 685 
 686   // Check locals
 687   StackValueCollection *locs = locals();
 688   StackValueCollection *other_locs = other->locals();
 689   assert(locs->size() == other_locs->size(), "sanity check");
 690   int i;
 691   for(i = 0; i < locs->size(); i++) {




  72     if (cb->is_compiled()) {
  73       CompiledMethod* nm = (CompiledMethod*)cb;
  74       return new compiledVFrame(f, reg_map, thread, nm);
  75     }
  76 
  77     if (f->is_runtime_frame()) {
  78       // Skip this frame and try again.
  79       RegisterMap temp_map = *reg_map;
  80       frame s = f->sender(&temp_map);
  81       return new_vframe(&s, &temp_map, thread);
  82     }
  83   }
  84 
  85   // External frame
  86   return new externalVFrame(f, reg_map, thread);
  87 }
  88 
  89 vframe* vframe::sender() const {
  90   RegisterMap temp_map = *register_map();
  91   assert(is_top(), "just checking");
  92   if (_fr.is_empty()) return NULL;
  93   if (_fr.is_entry_frame() && _fr.is_first_frame()) return NULL;
  94   frame s = _fr.real_sender(&temp_map);
  95   if (s.is_first_frame()) return NULL;
  96   return vframe::new_vframe(&s, &temp_map, thread());
  97 }
  98 
  99 vframe* vframe::top() const {
 100   vframe* vf = (vframe*) this;
 101   while (!vf->is_top()) vf = vf->sender();
 102   return vf;
 103 }
 104 
 105 
 106 javaVFrame* vframe::java_sender() const {
 107   vframe* f = sender();
 108   while (f != NULL) {
 109     if (f->is_java_frame()) return javaVFrame::cast(f);
 110     f = f->sender();
 111   }
 112   return NULL;


 241                 !mark->monitor()->is_entered(thread())
 242               )) {
 243             lock_state = "waiting to lock";
 244           } else {
  245             // We own the monitor, which is not as interesting, so
  246             // disable the extra printing below.
 247             mark = NULL;
 248           }
 249         }
 250         print_locked_object_class_name(st, Handle(THREAD, monitor->owner()), lock_state);
 251 
 252         found_first_monitor = true;
 253       }
 254     }
 255   }
 256 }
 257 
 258 // ------------- interpretedVFrame --------------
 259 
 260 u_char* interpretedVFrame::bcp() const {
  261   return (!register_map()->in_cont()) ? fr().interpreter_frame_bcp() : Continuation::interpreter_frame_bcp(fr(), register_map());
 262 }
 263 
 264 void interpretedVFrame::set_bcp(u_char* bcp) {
  265   assert(!register_map()->in_cont(), "unsupported for continuation frames"); // not implemented for now because it appears to be unused
 266   fr().interpreter_frame_set_bcp(bcp);
 267 }
 268 
 269 intptr_t* interpretedVFrame::locals_addr_at(int offset) const {
  270   assert(!register_map()->in_cont(), "unsupported for continuation frames"); // not implemented for now because it appears to be unused
 271   assert(fr().is_interpreted_frame(), "frame should be an interpreted frame");
 272   return fr().interpreter_frame_local_at(offset);
 273 }
 274 
 275 
 276 GrowableArray<MonitorInfo*>* interpretedVFrame::monitors() const {
 277   GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(5);
 278   if (!register_map()->in_cont()) { // no monitors in continuations
 279     for (BasicObjectLock* current = (fr().previous_monitor_in_interpreter_frame(fr().interpreter_frame_monitor_begin()));
 280         current >= fr().interpreter_frame_monitor_end();
 281         current = fr().previous_monitor_in_interpreter_frame(current)) {
 282       result->push(new MonitorInfo(current->obj(), current->lock(), false, false));
 283     }
 284   }
 285   return result;
 286 }
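
A minimal sketch of how a caller might consume the monitor list built above, assuming the standard javaVFrame/MonitorInfo/GrowableArray accessors (monitors(), owner(), length(), at()); illustrative only, not part of this change, and the helper name is hypothetical:

  // Sketch: print the objects locked by a Java frame.
  static void print_owned_monitors(javaVFrame* jvf, outputStream* st) {
    ResourceMark rm;
    GrowableArray<MonitorInfo*>* mons = jvf->monitors();
    for (int i = 0; i < mons->length(); i++) {
      MonitorInfo* mi = mons->at(i);
      oop obj = mi->owner();
      if (obj != NULL) {
        st->print_cr("  - locked " INTPTR_FORMAT " (a %s)",
                     p2i((oopDesc*)obj), obj->klass()->external_name());
      }
    }
  }
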
 287 
 288 int interpretedVFrame::bci() const {
 289   return method()->bci_from(bcp());
 290 }
 291 
 292 Method* interpretedVFrame::method() const {
 293   return (!register_map()->in_cont()) ? fr().interpreter_frame_method() : Continuation::interpreter_frame_method(fr(), register_map());
 294 }
 295 
 296 static StackValue* create_stack_value_from_oop_map(const RegisterMap* reg_map,
 297                                                    const InterpreterOopMap& oop_mask,
 298                                                    int index,
 299                                                    const intptr_t* const addr) {
 300 
 301   assert(index >= 0 &&
 302          index < oop_mask.number_of_entries(), "invariant");
 303 
 304   // categorize using oop_mask
 305   if (oop_mask.is_oop(index)) {
 306     oop obj = NULL;
 307     if (addr != NULL) {
 308       // obj = (UseCompressedOops && reg_map->in_cont()) ? HeapAccess<IS_ARRAY>::oop_load((narrowOop*)addr) : *(oop*)addr;
 309       if (UseCompressedOops && reg_map->in_cont())
 310         obj = HeapAccess<IS_ARRAY>::oop_load((narrowOop*)addr);
 311       else
 312         obj = *(oop*)addr;
 313     }
 314     // reference (oop) "r"
 315     Handle h(Thread::current(), obj);
 316     return new StackValue(h);
 317   }
 318   // value (integer) "v"
 319   return new StackValue(addr != NULL ? *addr : 0);
 320 }
 321 
 322 static bool is_in_expression_stack(const frame& fr, const intptr_t* const addr) {
 323   assert(addr != NULL, "invariant");
 324 
  325   // Ensure the address is 'inside' the expression stack (i.e., addr >= sp for Intel).
 326   // In case of exceptions, the expression stack is invalid and the sp
 327   // will be reset to express this condition.
 328   if (frame::interpreter_frame_expression_stack_direction() > 0) {
 329     return addr <= fr.interpreter_frame_tos_address();
 330   }
 331 
 332   return addr >= fr.interpreter_frame_tos_address();
 333 }
 334 
 335 static void stack_locals(StackValueCollection* result,
 336                          int length,
 337                          const InterpreterOopMap& oop_mask,
 338                          const frame& fr,
 339                          const RegisterMap* reg_map) {
 340 
 341   assert(result != NULL, "invariant");
 342 
 343   for (int i = 0; i < length; ++i) {
 344     const intptr_t* addr;
 345     if (!reg_map->in_cont()) {
 346       addr = fr.interpreter_frame_local_at(i);
 347       assert(addr >= fr.sp(), "must be inside the frame");
 348     } else {
 349       addr = (intptr_t*)Continuation::interpreter_frame_local_at(fr, reg_map, oop_mask, i);
 350     }
 351     assert(addr != NULL, "invariant");

 352 
 353     StackValue* const sv = create_stack_value_from_oop_map(reg_map, oop_mask, i, addr);
 354     assert(sv != NULL, "sanity check");
 355 
 356     result->add(sv);
 357   }
 358 }
 359 
 360 static void stack_expressions(StackValueCollection* result,
 361                               int length,
 362                               int max_locals,
 363                               const InterpreterOopMap& oop_mask,
 364                               const frame& fr,
 365                               const RegisterMap* reg_map) {
 366 
 367   assert(result != NULL, "invariant");
 368 
 369   for (int i = 0; i < length; ++i) {
 370     const intptr_t* addr;
 371     if (!reg_map->in_cont()) {
 372       addr = fr.interpreter_frame_expression_stack_at(i);
 373       assert(addr != NULL, "invariant");
 374       if (!is_in_expression_stack(fr, addr)) {
 375         // Need to ensure no bogus escapes.
 376         addr = NULL;
 377       }
 378     } else {
 379       addr = (intptr_t*)Continuation::interpreter_frame_expression_stack_at(fr, reg_map, oop_mask, i);
 380     }
 381 
 382     StackValue* const sv = create_stack_value_from_oop_map(reg_map,
 383                                                            oop_mask,
 384                                                            i + max_locals,
 385                                                            addr);
 386     assert(sv != NULL, "sanity check");
 387 
 388     result->add(sv);
 389   }
 390 }
 391 
 392 StackValueCollection* interpretedVFrame::locals() const {
 393   return stack_data(false);
 394 }
 395 
 396 StackValueCollection* interpretedVFrame::expressions() const {
 397   return stack_data(true);
 398 }
 399 
 400 /*
 401  * Worker routine for fetching references and/or values
 402  * for a particular bci in the interpretedVFrame.
 403  *


 414   method()->mask_for(bci(), &oop_mask);
 415   const int mask_len = oop_mask.number_of_entries();
 416 
 417   // If the method is native, method()->max_locals() is not telling the truth.
 418   // For our purposes, max locals instead equals the size of parameters.
 419   const int max_locals = method()->is_native() ?
 420     method()->size_of_parameters() : method()->max_locals();
 421 
 422   assert(mask_len >= max_locals, "invariant");
 423 
 424   const int length = expressions ? mask_len - max_locals : max_locals;
 425   assert(length >= 0, "invariant");
 426 
 427   StackValueCollection* const result = new StackValueCollection(length);
 428 
 429   if (0 == length) {
 430     return result;
 431   }
 432 
 433   if (expressions) {
 434     stack_expressions(result, length, max_locals, oop_mask, fr(), register_map());
 435   } else {
 436     stack_locals(result, length, oop_mask, fr(), register_map());
 437   }
 438 
 439   assert(length == result->size(), "invariant");
 440 
 441   return result;
 442 }
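
A minimal sketch of how locals() might be consumed, assuming the standard StackValueCollection/StackValue API (size(), at(), type(), get_obj()); illustrative only, and the helper name is hypothetical:

  // Sketch: dump the interpreter locals of a frame, separating oop slots
  // (as categorized by the oop map above) from raw value slots.
  static void dump_locals(interpretedVFrame* ivf, outputStream* st) {
    StackValueCollection* locs = ivf->locals();
    for (int i = 0; i < locs->size(); i++) {
      StackValue* sv = locs->at(i);
      if (sv->type() == T_OBJECT) {
        st->print_cr("  local[%d] = oop " INTPTR_FORMAT,
                     i, p2i((oopDesc*)sv->get_obj()()));
      } else {
        st->print_cr("  local[%d] = value slot", i);
      }
    }
  }
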
 443 
 444 void interpretedVFrame::set_locals(StackValueCollection* values) const {
 445   if (values == NULL || values->size() == 0) return;
 446 
  447   // If the method is native, max_locals is not telling the truth.
  448   // max_locals then equals the size of parameters.
 449   const int max_locals = method()->is_native() ?
 450     method()->size_of_parameters() : method()->max_locals();
 451 
 452   assert(max_locals == values->size(), "Mismatch between actual stack format and supplied data");
 453 
 454   // handle locals
 455   for (int i = 0; i < max_locals; i++) {
 456     // Find stack location


 467   }
 468 }
 469 
 470 // ------------- cChunk --------------
 471 
 472 entryVFrame::entryVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread)
 473 : externalVFrame(fr, reg_map, thread) {}
 474 
 475 #ifdef ASSERT
 476 void vframeStreamCommon::found_bad_method_frame() const {
 477   // 6379830 Cut point for an assertion that occasionally fires when
 478   // we are using the performance analyzer.
 479   // Disable this assert when testing the analyzer with fastdebug.
 480   // -XX:SuppressErrorAt=vframe.cpp:XXX (XXX=following line number)
 481   fatal("invalid bci or invalid scope desc");
 482 }
 483 #endif
 484 
 485 // top-frame will be skipped
 486 vframeStream::vframeStream(JavaThread* thread, frame top_frame,
 487   bool stop_at_java_call_stub) : vframeStreamCommon(RegisterMap(thread, false, true)) {
 488   _stop_at_java_call_stub = stop_at_java_call_stub;
 489 
 490   // skip top frame, as it may not be at safepoint
 491   _prev_frame = top_frame;
 492   _frame  = top_frame.sender(&_reg_map);
 493   while (!fill_from_frame()) {
 494     _prev_frame = _frame;
 495     _frame = _frame.sender(&_reg_map);
 496   }
 497 }
 498 
 499 vframeStream::vframeStream(JavaThread* thread, Handle continuation_scope, bool stop_at_java_call_stub) 
 500  : vframeStreamCommon(RegisterMap(thread, false, true)) {
 501 
 502   _stop_at_java_call_stub = stop_at_java_call_stub;
 503   _continuation_scope = continuation_scope;
 504   
 505   if (!thread->has_last_Java_frame()) {
 506     _mode = at_end_mode;
 507     return;
 508   }
 509 
 510   _frame = _thread->last_frame();
 511   oop cont = _thread->last_continuation();
 512   while (!fill_from_frame()) {
 513     if (cont != (oop)NULL && Continuation::is_continuation_entry_frame(_frame, &_reg_map)) {
 514       cont = java_lang_Continuation::parent(cont);
 515     }
 516     _frame = _frame.sender(&_reg_map);
 517   }
 518   _cont = Handle(Thread::current(), cont);
 519 
 520   assert (_reg_map.cont() == (oop)NULL || oopDesc::equals(_cont(), _reg_map.cont()), 
 521     "map.cont: " INTPTR_FORMAT " vframeStream: " INTPTR_FORMAT, 
 522     p2i((oopDesc*)_reg_map.cont()), p2i((oopDesc*)_cont()));
 523 }
 524 
 525 vframeStream::vframeStream(Handle continuation) 
 526  : vframeStreamCommon(RegisterMap(NULL, false, true)) {
 527 
 528   _stop_at_java_call_stub = false;
 529   _continuation_scope = Handle();
 530   
 531   if (!Continuation::has_last_Java_frame(continuation)) {
 532     _mode = at_end_mode;
 533     return;
 534   }
 535 
 536   _frame = Continuation::last_frame(continuation, &_reg_map);
 537   _cont = continuation;
 538   while (!fill_from_frame()) {
 539     _frame = _frame.sender(&_reg_map);
 540   }
 541 
 542   assert (_reg_map.cont() == (oop)NULL || oopDesc::equals(_cont(), _reg_map.cont()), 
 543     "map.cont: " INTPTR_FORMAT " vframeStream: " INTPTR_FORMAT, 
 544     p2i((oopDesc*)_reg_map.cont()), p2i((oopDesc*)_cont()));
 545 }
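
A minimal sketch of how the continuation constructor above might be used to walk a continuation's Java frames, assuming the usual vframeStreamCommon accessors (at_end(), next(), method(), bci()); illustrative only, and the helper name is hypothetical:

  // Sketch: print a continuation's stack, frame by frame.
  static void print_continuation_stack(Handle continuation) {
    ResourceMark rm;
    for (vframeStream vfst(continuation); !vfst.at_end(); vfst.next()) {
      Method* m = vfst.method();
      tty->print_cr("  at %s.%s bci=%d",
                    m->method_holder()->external_name(),
                    m->name()->as_C_string(),
                    vfst.bci());
    }
  }
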
 546 
 547 
 548 // Step back n frames, skip any pseudo frames in between.
 549 // This function is used in Class.forName, Class.newInstance, Method.Invoke,
 550 // AccessController.doPrivileged.
 551 void vframeStreamCommon::security_get_caller_frame(int depth) {
 552   assert(depth >= 0, "invalid depth: %d", depth);
 553   for (int n = 0; !at_end(); security_next()) {
 554     if (!method()->is_ignored_by_security_stack_walk()) {
 555       if (n == depth) {
 556         // We have reached the desired depth; return.
 557         return;
 558       }
 559       n++;  // this is a non-skipped frame; count it against the depth
 560     }
 561   }
 562   // NOTE: At this point there were not enough frames on the stack
 563   // to walk to depth.  Callers of this method have to check for at_end.
 564 }
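
The typical calling pattern, sketched under the assumption of the standard vframeStream API (cf. its uses for Class.forName and friends); the helper name is hypothetical:

  // Sketch: find the klass of the caller at the given security-walk depth.
  static Klass* caller_klass_at_depth(JavaThread* thread, int depth) {
    vframeStream vfst(thread);
    vfst.security_get_caller_frame(depth);
    if (vfst.at_end()) {
      return NULL;  // fewer than 'depth' non-ignored frames on the stack
    }
    return vfst.method()->method_holder();
  }
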
 565 
 566 


 726 
 727 void javaVFrame::print_value() const {
 728   Method*    m = method();
 729   InstanceKlass*     k = m->method_holder();
 730   tty->print_cr("frame( sp=" INTPTR_FORMAT ", unextended_sp=" INTPTR_FORMAT ", fp=" INTPTR_FORMAT ", pc=" INTPTR_FORMAT ")",
 731                 p2i(_fr.sp()),  p2i(_fr.unextended_sp()), p2i(_fr.fp()), p2i(_fr.pc()));
 732   tty->print("%s.%s", k->internal_name(), m->name()->as_C_string());
 733 
 734   if (!m->is_native()) {
 735     Symbol*  source_name = k->source_file_name();
 736     int        line_number = m->line_number_from_bci(bci());
 737     if (source_name != NULL && (line_number != -1)) {
 738       tty->print("(%s:%d)", source_name->as_C_string(), line_number);
 739     }
 740   } else {
 741     tty->print("(Native Method)");
 742   }
 743   // Check frame size and print warning if it looks suspiciously large
 744   if (fr().sp() != NULL) {
 745     RegisterMap map = *register_map();
 746     uint size = (map.in_cont() || Continuation::is_cont_barrier_frame(fr()))
 747       ? Continuation::frame_size(fr(), &map)
 748       : fr().frame_size(&map);
 749 #ifdef _LP64
 750     if (size > 8*K) warning("SUSPICIOUSLY LARGE FRAME (%d)", size);
 751 #else
 752     if (size > 4*K) warning("SUSPICIOUSLY LARGE FRAME (%d)", size);
 753 #endif
 754   }
 755 }
 756 
 757 
 758 bool javaVFrame::structural_compare(javaVFrame* other) {
 759   // Check static part
 760   if (method() != other->method()) return false;
 761   if (bci()    != other->bci())    return false;
 762 
 763   // Check locals
 764   StackValueCollection *locs = locals();
 765   StackValueCollection *other_locs = other->locals();
 766   assert(locs->size() == other_locs->size(), "sanity check");
 767   int i;
 768   for(i = 0; i < locs->size(); i++) {