< prev index next >

src/hotspot/share/code/codeBlob.cpp

Print this page

  47 #include "runtime/mutexLocker.hpp"
  48 #include "runtime/safepoint.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "runtime/stubCodeGenerator.hpp"
  51 #include "runtime/stubRoutines.hpp"
  52 #include "runtime/vframe.hpp"
  53 #include "services/memoryService.hpp"
  54 #include "utilities/align.hpp"
  55 #ifdef COMPILER1
  56 #include "c1/c1_Runtime1.hpp"
  57 #endif
  58 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// (A polymorphic type would embed a compiler-generated vtable pointer, which
// cannot be relocated/cached; the hand-rolled Vptr proxies below are used instead.)

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// ExceptionBlob and UncommonTrapBlob only exist in C2-enabled builds.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif
  74 
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// These statically-allocated Vptr instances stand in for real C++ vtables;
// CodeBlob::vptr(CodeBlobKind) below maps a blob kind to the matching instance.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
const SafepointBlob::Vptr            SafepointBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
const UncommonTrapBlob::Vptr         UncommonTrapBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  88 
// Returns the proxy vtable for the given blob kind (nullptr for
// CodeBlobKind::None). The table is positional: entry i must correspond to
// CodeBlobKind value i.
// NOTE(review): the #ifdef COMPILER2 entries presumably mirror matching
// #ifdefs in the CodeBlobKind enum declaration - confirm against codeBlob.hpp
// before adding or reordering kinds.
const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)kind];
}
 109 
// Returns the proxy vtable matching this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 113 
 114 unsigned int CodeBlob::align_code_offset(int offset) {
 115   // align the size to CodeEntryAlignment
 116   int header_size = (int)CodeHeap::header_size();

 407   // Track memory usage statistic after releasing CodeCache_lock
 408   MemoryService::track_code_cache_memory_usage();
 409 
 410   return blob;
 411 }
 412 
 413 
// A BufferBlob is a RuntimeBlob with no safe frame (frame_never_safe),
// zero frame size, and no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 417 
 418 // Used by gtest
 419 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 420   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 421 
 422   BufferBlob* blob = nullptr;
 423   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 424   assert(name != nullptr, "must provide a name");
 425   {
 426     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 427     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
 428   }
 429   // Track memory usage statistic after releasing CodeCache_lock
 430   MemoryService::track_code_cache_memory_usage();
 431 
 432   return blob;
 433 }
 434 
// Placement allocation: carves 'size' bytes out of the non-nmethod section
// of the code cache. The default C++ size argument 's' is intentionally unused.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 438 
// Releases the blob's storage via the shared RuntimeBlob free path.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 442 




 443 
 444 //----------------------------------------------------------------------------------------------------
 445 // Implementation of AdapterBlob
 446 
// Constructs the adapter blob, validates the supplied entry-point offsets
// (debug builds only), records them in the blob, and commits the blob to
// the code cache.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob)) {
  // The i2c entry is expected at the very start of the code.
  assert(entry_offset[I2C] == 0, "sanity check");
#ifdef ASSERT
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    // Offsets of -1 are tolerated only from C2I_No_Clinit_Check onward.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  _c2i_offset = entry_offset[C2I];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  // Publish the finished blob to the code cache.
  CodeCache::commit(this);
}
 464 
 465 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) {
 466   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 467 
 468   CodeCache::gc_on_allocation();
 469 
 470   AdapterBlob* blob = nullptr;
 471   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 472   {
 473     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 474     blob = new (size) AdapterBlob(size, cb, entry_offset);
 475   }
 476   // Track memory usage statistic after releasing CodeCache_lock
 477   MemoryService::track_code_cache_memory_usage();
 478 
 479   return blob;
 480 }
 481 
 482 //----------------------------------------------------------------------------------------------------
 483 // Implementation of VtableBlob
 484 
// Placement allocation for VtableBlob: allocates from the non-nmethod code
// heap WITHOUT the usual allocation-failure handling (presumably returning
// nullptr on exhaustion - see rationale below).
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 494 

 537   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 538 
 539   MethodHandlesAdapterBlob* blob = nullptr;
 540   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 541   // align the size to CodeEntryAlignment
 542   size = CodeBlob::align_code_offset(size);
 543   size += align_up(buffer_size, oopSize);
 544   {
 545     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 546     blob = new (size) MethodHandlesAdapterBlob(size);
 547     if (blob == nullptr) {
 548       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 549     }
 550   }
 551   // Track memory usage statistic after releasing CodeCache_lock
 552   MemoryService::track_code_cache_memory_usage();
 553 
 554   return blob;
 555 }
 556 

























 557 //----------------------------------------------------------------------------------------------------
 558 // Implementation of RuntimeStub
 559 
// Constructs a RuntimeStub blob: forwards everything to RuntimeBlob with the
// RuntimeStub kind and header size. Frame/oop-map metadata is supplied by the
// caller (see new_runtime_stub).
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 573 
 574 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 575                                            CodeBuffer* cb,
 576                                            int16_t frame_complete,

  47 #include "runtime/mutexLocker.hpp"
  48 #include "runtime/safepoint.hpp"
  49 #include "runtime/sharedRuntime.hpp"
  50 #include "runtime/stubCodeGenerator.hpp"
  51 #include "runtime/stubRoutines.hpp"
  52 #include "runtime/vframe.hpp"
  53 #include "services/memoryService.hpp"
  54 #include "utilities/align.hpp"
  55 #ifdef COMPILER1
  56 #include "c1/c1_Runtime1.hpp"
  57 #endif
  58 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// (A polymorphic type would embed a compiler-generated vtable pointer, which
// cannot be relocated/cached; the hand-rolled Vptr proxies below are used instead.)

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value,   "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// ExceptionBlob and UncommonTrapBlob only exist in C2-enabled builds.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif
  75 
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// These statically-allocated Vptr instances stand in for real C++ vtables;
// CodeBlob::vptr(CodeBlobKind) below maps a blob kind to the matching instance.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
const SafepointBlob::Vptr            SafepointBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
const UncommonTrapBlob::Vptr         UncommonTrapBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  89 
// Returns the proxy vtable for the given blob kind (nullptr for
// CodeBlobKind::None). The table is positional: entry i must correspond to
// CodeBlobKind value i.
// NOTE(review): the #ifdef COMPILER2 entries presumably mirror matching
// #ifdefs in the CodeBlobKind enum declaration - confirm against codeBlob.hpp
// before adding or reordering kinds.
const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &BufferedInlineTypeBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)kind];
}
 111 
// Returns the proxy vtable matching this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 115 
 116 unsigned int CodeBlob::align_code_offset(int offset) {
 117   // align the size to CodeEntryAlignment
 118   int header_size = (int)CodeHeap::header_size();

 409   // Track memory usage statistic after releasing CodeCache_lock
 410   MemoryService::track_code_cache_memory_usage();
 411 
 412   return blob;
 413 }
 414 
 415 
// Short-form constructor: a BufferBlob with no safe frame
// (frame_never_safe), zero frame size, and no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 419 
 420 // Used by gtest
 421 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 422   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 423 
 424   BufferBlob* blob = nullptr;
 425   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 426   assert(name != nullptr, "must provide a name");
 427   {
 428     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 429     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
 430   }
 431   // Track memory usage statistic after releasing CodeCache_lock
 432   MemoryService::track_code_cache_memory_usage();
 433 
 434   return blob;
 435 }
 436 
// Placement allocation: carves 'size' bytes out of the non-nmethod section
// of the code cache. The default C++ size argument 's' is intentionally unused.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 440 
// Releases the blob's storage via the shared RuntimeBlob free path.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 444 
// Full-form constructor: forwards explicit frame/oop-map metadata to
// RuntimeBlob (used by subclasses such as AdapterBlob that carry frames).
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, header_size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
 448 
 449 
 450 //----------------------------------------------------------------------------------------------------
 451 // Implementation of AdapterBlob
 452 
// Constructs the adapter blob with explicit frame/oop-map metadata, validates
// the supplied entry-point offsets (debug builds only), records them
// (including the inline-type entry variants), and commits the blob to the
// code cache.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
#ifdef ASSERT
  // The i2c entry is expected at the very start of the code.
  assert(entry_offset[I2C] == 0, "sanity check");
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    // Offsets of -1 are tolerated only from C2I_No_Clinit_Check onward.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  _c2i_offset = entry_offset[C2I];
  _c2i_inline_offset = entry_offset[C2I_Inline];
  _c2i_inline_ro_offset = entry_offset[C2I_Inline_RO];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_unverified_inline_offset = entry_offset[C2I_Unverified_Inline];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  // Publish the finished blob to the code cache.
  CodeCache::commit(this);
}
 473 
 474 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
 475   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 476 
 477   CodeCache::gc_on_allocation();
 478 
 479   AdapterBlob* blob = nullptr;
 480   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 481   {
 482     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 483     blob = new (size) AdapterBlob(size, cb, entry_offset, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
 484   }
 485   // Track memory usage statistic after releasing CodeCache_lock
 486   MemoryService::track_code_cache_memory_usage();
 487 
 488   return blob;
 489 }
 490 
 491 //----------------------------------------------------------------------------------------------------
 492 // Implementation of VtableBlob
 493 
// Placement allocation for VtableBlob: allocates from the non-nmethod code
// heap WITHOUT the usual allocation-failure handling (presumably returning
// nullptr on exhaustion - see rationale below).
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 503 

 546   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 547 
 548   MethodHandlesAdapterBlob* blob = nullptr;
 549   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 550   // align the size to CodeEntryAlignment
 551   size = CodeBlob::align_code_offset(size);
 552   size += align_up(buffer_size, oopSize);
 553   {
 554     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 555     blob = new (size) MethodHandlesAdapterBlob(size);
 556     if (blob == nullptr) {
 557       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 558     }
 559   }
 560   // Track memory usage statistic after releasing CodeCache_lock
 561   MemoryService::track_code_cache_memory_usage();
 562 
 563   return blob;
 564 }
 565 
 566 //----------------------------------------------------------------------------------------------------
 567 // Implementation of BufferedInlineTypeBlob
// Constructs the buffered-inline-type blob, recording the offsets of the
// pack/unpack entry points, then commits it to the code cache.
// NOTE(review): offsets are presumably relative to the start of the code
// section - confirm against the generator of these stubs.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  // Publish the finished blob to the code cache.
  CodeCache::commit(this);
}
 575 
 576 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
 577   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 578 
 579   BufferedInlineTypeBlob* blob = nullptr;
 580   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
 581   {
 582     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 583     blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
 584   }
 585   // Track memory usage statistic after releasing CodeCache_lock
 586   MemoryService::track_code_cache_memory_usage();
 587 
 588   return blob;
 589 }
 590 
 591 //----------------------------------------------------------------------------------------------------
 592 // Implementation of RuntimeStub
 593 
// Constructs a RuntimeStub blob: forwards everything to RuntimeBlob with the
// RuntimeStub kind and header size. Frame/oop-map metadata is supplied by the
// caller (see new_runtime_stub).
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 607 
 608 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 609                                            CodeBuffer* cb,
 610                                            int16_t frame_complete,
< prev index next >