< prev index next >

src/hotspot/share/code/codeBlob.cpp

Print this page

  47 #include "runtime/mutexLocker.hpp"
  48 #include "runtime/safepoint.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "runtime/stubCodeGenerator.hpp"
  51 #include "runtime/stubRoutines.hpp"
  52 #include "runtime/vframe.hpp"
  53 #include "services/memoryService.hpp"
  54 #include "utilities/align.hpp"
  55 #ifdef COMPILER1
  56 #include "c1/c1_Runtime1.hpp"
  57 #endif
  58 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds only exist when the C2 compiler is built in.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif

// Add proxy vtables.
// We only need a few for now - they are used only from prints.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
const SafepointBlob::Vptr            SafepointBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
const UncommonTrapBlob::Vptr         UncommonTrapBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  88 
// Maps a CodeBlobKind to the proxy vtable for that kind.
// NOTE: the kind value is used directly as an index into this table, so the
// entries below must stay in exactly the same order (and under the same
// #ifdefs) as the CodeBlobKind enum declaration.
const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)kind];
}
 109 
// Returns the proxy vtable for this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 113 
 114 unsigned int CodeBlob::align_code_offset(int offset) {
 115   // align the size to CodeEntryAlignment
 116   int header_size = (int)CodeHeap::header_size();

 412   // Track memory usage statistic after releasing CodeCache_lock
 413   MemoryService::track_code_cache_memory_usage();
 414 
 415   return blob;
 416 }
 417 
 418 
// Basic BufferBlob constructor: no completed frame (CodeOffsets::frame_never_safe),
// zero frame size, and no oop maps; everything else forwards to RuntimeBlob.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 422 
 423 // Used by gtest
 424 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 425   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 426 
 427   BufferBlob* blob = nullptr;
 428   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 429   assert(name != nullptr, "must provide a name");
 430   {
 431     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 432     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
 433   }
 434   // Track memory usage statistic after releasing CodeCache_lock
 435   MemoryService::track_code_cache_memory_usage();
 436 
 437   return blob;
 438 }
 439 
// Placement allocator: BufferBlobs live in the code cache's non-nmethod heap,
// not on the C heap. The compiler-supplied object size 's' is ignored; callers
// pass the full allocation size (computed via CodeBlob::allocation_size) as 'size'.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 443 
// Releases a BufferBlob back to the code cache; simply forwards to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 447 




 448 
 449 //----------------------------------------------------------------------------------------------------
 450 // Implementation of AdapterBlob
 451 
// Constructs the adapter blob and records the offsets of the individual
// adapter entry points. The I2C entry is expected at offset 0; every other
// entry must lie within the blob's instructions, or be -1 (unset) for entries
// at or after C2I_No_Clinit_Check.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob)) {
  assert(entry_offset[I2C] == 0, "sanity check");
#ifdef ASSERT
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  _c2i_offset = entry_offset[C2I];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  CodeCache::commit(this);
}
 469 
 470 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) {
 471   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 472 
 473   CodeCache::gc_on_allocation();
 474 
 475   AdapterBlob* blob = nullptr;
 476   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 477   {
 478     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 479     blob = new (size) AdapterBlob(size, cb, entry_offset);
 480   }
 481   // Track memory usage statistic after releasing CodeCache_lock
 482   MemoryService::track_code_cache_memory_usage();
 483 
 484   return blob;
 485 }
 486 
 487 //----------------------------------------------------------------------------------------------------
 488 // Implementation of VtableBlob
 489 
// Placement allocator for VtableBlobs. Unlike the other blob allocators, a
// failed allocation is not reported here and may return nullptr.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 499 

 542   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 543 
 544   MethodHandlesAdapterBlob* blob = nullptr;
 545   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 546   // align the size to CodeEntryAlignment
 547   size = CodeBlob::align_code_offset(size);
 548   size += align_up(buffer_size, oopSize);
 549   {
 550     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 551     blob = new (size) MethodHandlesAdapterBlob(size);
 552     if (blob == nullptr) {
 553       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 554     }
 555   }
 556   // Track memory usage statistic after releasing CodeCache_lock
 557   MemoryService::track_code_cache_memory_usage();
 558 
 559   return blob;
 560 }
 561 

























 562 //----------------------------------------------------------------------------------------------------
 563 // Implementation of RuntimeStub
 564 
// Constructs a RuntimeStub; all state (frame layout, oop maps, GC policy for
// caller arguments) is forwarded to and stored by the RuntimeBlob base.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 578 
 579 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 580                                            CodeBuffer* cb,
 581                                            int16_t frame_complete,

  47 #include "runtime/mutexLocker.hpp"
  48 #include "runtime/safepoint.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "runtime/stubCodeGenerator.hpp"
  51 #include "runtime/stubRoutines.hpp"
  52 #include "runtime/vframe.hpp"
  53 #include "services/memoryService.hpp"
  54 #include "utilities/align.hpp"
  55 #ifdef COMPILER1
  56 #include "c1/c1_Runtime1.hpp"
  57 #endif
  58 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value,   "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds only exist when the C2 compiler is built in.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif

// Add proxy vtables.
// We only need a few for now - they are used only from prints.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
const SafepointBlob::Vptr            SafepointBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
const UncommonTrapBlob::Vptr         UncommonTrapBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  89 
// Maps a CodeBlobKind to the proxy vtable for that kind.
// NOTE: the kind value is used directly as an index into this table, so the
// entries below must stay in exactly the same order (and under the same
// #ifdefs) as the CodeBlobKind enum declaration.
const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &BufferedInlineTypeBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)kind];
}
 111 
// Returns the proxy vtable for this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 115 
 116 unsigned int CodeBlob::align_code_offset(int offset) {
 117   // align the size to CodeEntryAlignment
 118   int header_size = (int)CodeHeap::header_size();

 414   // Track memory usage statistic after releasing CodeCache_lock
 415   MemoryService::track_code_cache_memory_usage();
 416 
 417   return blob;
 418 }
 419 
 420 
// Basic BufferBlob constructor: no completed frame (CodeOffsets::frame_never_safe),
// zero frame size, and no oop maps; everything else forwards to RuntimeBlob.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 424 
 425 // Used by gtest
 426 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 427   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 428 
 429   BufferBlob* blob = nullptr;
 430   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 431   assert(name != nullptr, "must provide a name");
 432   {
 433     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 434     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
 435   }
 436   // Track memory usage statistic after releasing CodeCache_lock
 437   MemoryService::track_code_cache_memory_usage();
 438 
 439   return blob;
 440 }
 441 
// Placement allocator: BufferBlobs live in the code cache's non-nmethod heap,
// not on the C heap. The compiler-supplied object size 's' is ignored; callers
// pass the full allocation size (computed via CodeBlob::allocation_size) as 'size'.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 445 
// Releases a BufferBlob back to the code cache; simply forwards to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 449 
// Full constructor used by subclasses (e.g. AdapterBlob below) that carry
// frame and oop map information; forwards everything to RuntimeBlob.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, header_size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
 453 
 454 
//----------------------------------------------------------------------------------------------------
// Implementation of AdapterBlob

// Constructs the adapter blob and records the offsets of the individual
// adapter entry points. The I2C entry is expected at offset 0; every other
// entry must lie within the blob's instructions, or be -1 (unset) for entries
// at or after C2I_No_Clinit_Check.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
#ifdef ASSERT
  assert(entry_offset[I2C] == 0, "sanity check");
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  _c2i_offset = entry_offset[C2I];
  _c2i_inline_offset = entry_offset[C2I_Inline];
  _c2i_inline_ro_offset = entry_offset[C2I_Inline_RO];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_unverified_inline_offset = entry_offset[C2I_Unverified_Inline];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  CodeCache::commit(this);
}
 478 
 479 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
 480   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 481 
 482   CodeCache::gc_on_allocation();
 483 
 484   AdapterBlob* blob = nullptr;
 485   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 486   {
 487     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 488     blob = new (size) AdapterBlob(size, cb, entry_offset, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
 489   }
 490   // Track memory usage statistic after releasing CodeCache_lock
 491   MemoryService::track_code_cache_memory_usage();
 492 
 493   return blob;
 494 }
 495 
 496 //----------------------------------------------------------------------------------------------------
 497 // Implementation of VtableBlob
 498 
// Placement allocator for VtableBlobs. Unlike the other blob allocators, a
// failed allocation is not reported here and may return nullptr.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 508 

 551   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 552 
 553   MethodHandlesAdapterBlob* blob = nullptr;
 554   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 555   // align the size to CodeEntryAlignment
 556   size = CodeBlob::align_code_offset(size);
 557   size += align_up(buffer_size, oopSize);
 558   {
 559     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 560     blob = new (size) MethodHandlesAdapterBlob(size);
 561     if (blob == nullptr) {
 562       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 563     }
 564   }
 565   // Track memory usage statistic after releasing CodeCache_lock
 566   MemoryService::track_code_cache_memory_usage();
 567 
 568   return blob;
 569 }
 570 
 571 //----------------------------------------------------------------------------------------------------
 572 // Implementation of BufferedInlineTypeBlob
// Records the offsets (within the generated code) of the pack-fields,
// pack-fields-jobject, and unpack-fields handlers, then commits the blob
// to the code cache.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);
}
 580 
 581 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
 582   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 583 
 584   BufferedInlineTypeBlob* blob = nullptr;
 585   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
 586   {
 587     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 588     blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
 589   }
 590   // Track memory usage statistic after releasing CodeCache_lock
 591   MemoryService::track_code_cache_memory_usage();
 592 
 593   return blob;
 594 }
 595 
 596 //----------------------------------------------------------------------------------------------------
 597 // Implementation of RuntimeStub
 598 
// Constructs a RuntimeStub; all state (frame layout, oop maps, GC policy for
// caller arguments) is forwarded to and stored by the RuntimeBlob base.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 612 
 613 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 614                                            CodeBuffer* cb,
 615                                            int16_t frame_complete,
< prev index next >