< prev index next >

src/hotspot/share/code/codeBlob.cpp

Print this page

  45 #include "runtime/mutexLocker.hpp"
  46 #include "runtime/safepoint.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubCodeGenerator.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/vframe.hpp"
  51 #include "services/memoryService.hpp"
  52 #include "utilities/align.hpp"
  53 #ifdef COMPILER1
  54 #include "c1/c1_Runtime1.hpp"
  55 #endif
  56 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// These checks are compile-time only; they produce no runtime code.

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds exist only in C2 builds.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif
  72 
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// Definitions for the per-class Vptr singletons; CodeBlob::vptr(CodeBlobKind)
// below maps a blob kind to one of these objects.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  84 
  85 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  86   constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
  87       nullptr/* None */,
  88       &nmethod::_vpntr,
  89       &BufferBlob::_vpntr,
  90       &AdapterBlob::_vpntr,
  91       &VtableBlob::_vpntr,
  92       &MethodHandlesAdapterBlob::_vpntr,

  93       &RuntimeStub::_vpntr,
  94       &DeoptimizationBlob::_vpntr,
  95       &SafepointBlob::_vpntr,
  96 #ifdef COMPILER2
  97       &ExceptionBlob::_vpntr,
  98       &UncommonTrapBlob::_vpntr,
  99 #endif
 100       &UpcallStub::_vpntr
 101   };
 102 
 103   return array[(size_t)kind];
 104 }
 105 
// Return the proxy vtable for this blob, selected by its stored kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 109 
 110 unsigned int CodeBlob::align_code_offset(int offset) {
 111   // align the size to CodeEntryAlignment
 112   int header_size = (int)CodeHeap::header_size();

 408   // Track memory usage statistic after releasing CodeCache_lock
 409   MemoryService::track_code_cache_memory_usage();
 410 
 411   return blob;
 412 }
 413 
 414 
// Construct a BufferBlob with no frame info: frame offset is 'frame_never_safe',
// frame size 0, and no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 418 
 419 // Used by gtest
 420 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 421   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 422 
 423   BufferBlob* blob = nullptr;
 424   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 425   assert(name != nullptr, "must provide a name");
 426   {
 427     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 428     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
 429   }
 430   // Track memory usage statistic after releasing CodeCache_lock
 431   MemoryService::track_code_cache_memory_usage();
 432 
 433   return blob;
 434 }
 435 
// Placement allocator: carve 'size' bytes out of the non-nmethod code heap.
// The unused 's' is the compiler-supplied sizeof; the real size comes from caller.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 439 
// Release a BufferBlob back to the code cache via the RuntimeBlob machinery.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 443 




 444 
 445 //----------------------------------------------------------------------------------------------------
 446 // Implementation of AdapterBlob
 447 
// Construct an AdapterBlob holding the i2c/c2i adapter code in 'cb'.
// 'entry_offset' carries the offsets of each entry point within the code;
// the I2C entry must always be at offset 0 (asserted below).
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob)) {
  assert(entry_offset[I2C] == 0, "sanity check");
#ifdef ASSERT
  // Every remaining entry must lie within the instruction section, or be
  // unset (-1) — the latter only allowed from C2I_No_Clinit_Check onwards.
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  // Record the C2I entry offsets for later entry-point computation.
  _c2i_offset = entry_offset[C2I];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  // Commit this blob to the code cache.
  CodeCache::commit(this);
}
 465 
 466 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) {
 467   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 468 
 469   CodeCache::gc_on_allocation();
 470 
 471   AdapterBlob* blob = nullptr;
 472   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 473   {
 474     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 475     blob = new (size) AdapterBlob(size, cb, entry_offset);
 476   }
 477   // Track memory usage statistic after releasing CodeCache_lock
 478   MemoryService::track_code_cache_memory_usage();
 479 
 480   return blob;
 481 }
 482 
 483 //----------------------------------------------------------------------------------------------------
 484 // Implementation of VtableBlob
 485 
// Placement allocator for VtableBlob. Returns nullptr on exhaustion instead
// of triggering the usual failure path (see comment below).
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 495 

 536   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 537 
 538   MethodHandlesAdapterBlob* blob = nullptr;
 539   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 540   // align the size to CodeEntryAlignment
 541   size = CodeBlob::align_code_offset(size);
 542   size += align_up(buffer_size, oopSize);
 543   {
 544     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 545     blob = new (size) MethodHandlesAdapterBlob(size);
 546     if (blob == nullptr) {
 547       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 548     }
 549   }
 550   // Track memory usage statistic after releasing CodeCache_lock
 551   MemoryService::track_code_cache_memory_usage();
 552 
 553   return blob;
 554 }
 555 

























 556 //----------------------------------------------------------------------------------------------------
 557 // Implementation of RuntimeStub
 558 
// Construct a RuntimeStub: a RuntimeBlob of kind RuntimeStub that simply
// forwards all frame/oop-map information to the RuntimeBlob constructor.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 572 
 573 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 574                                            CodeBuffer* cb,
 575                                            int16_t frame_complete,

  45 #include "runtime/mutexLocker.hpp"
  46 #include "runtime/safepoint.hpp"
  47 #include "runtime/sharedRuntime.hpp"
  48 #include "runtime/stubCodeGenerator.hpp"
  49 #include "runtime/stubRoutines.hpp"
  50 #include "runtime/vframe.hpp"
  51 #include "services/memoryService.hpp"
  52 #include "utilities/align.hpp"
  53 #ifdef COMPILER1
  54 #include "c1/c1_Runtime1.hpp"
  55 #endif
  56 
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// These checks are compile-time only; they produce no runtime code.

static_assert(!std::is_polymorphic<nmethod>::value,            "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value,         "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value,        "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value,   "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value,         "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds exist only in C2 builds.
static_assert(!std::is_polymorphic<ExceptionBlob>::value,      "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value,   "no virtual methods are allowed in code blobs");
#endif
  73 
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// Definitions for the per-class Vptr singletons; CodeBlob::vptr(CodeBlobKind)
// below maps a blob kind to one of these objects.
const nmethod::Vptr                  nmethod::_vpntr;
const BufferBlob::Vptr               BufferBlob::_vpntr;
const RuntimeStub::Vptr              RuntimeStub::_vpntr;
const SingletonBlob::Vptr            SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr       DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr            ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr               UpcallStub::_vpntr;
  85 
  86 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  87   constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
  88       nullptr/* None */,
  89       &nmethod::_vpntr,
  90       &BufferBlob::_vpntr,
  91       &AdapterBlob::_vpntr,
  92       &VtableBlob::_vpntr,
  93       &MethodHandlesAdapterBlob::_vpntr,
  94       &BufferedInlineTypeBlob::_vpntr,
  95       &RuntimeStub::_vpntr,
  96       &DeoptimizationBlob::_vpntr,
  97       &SafepointBlob::_vpntr,
  98 #ifdef COMPILER2
  99       &ExceptionBlob::_vpntr,
 100       &UncommonTrapBlob::_vpntr,
 101 #endif
 102       &UpcallStub::_vpntr
 103   };
 104 
 105   return array[(size_t)kind];
 106 }
 107 
// Return the proxy vtable for this blob, selected by its stored kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
 111 
 112 unsigned int CodeBlob::align_code_offset(int offset) {
 113   // align the size to CodeEntryAlignment
 114   int header_size = (int)CodeHeap::header_size();

 410   // Track memory usage statistic after releasing CodeCache_lock
 411   MemoryService::track_code_cache_memory_usage();
 412 
 413   return blob;
 414 }
 415 
 416 
// Construct a BufferBlob with no frame info: frame offset is 'frame_never_safe',
// frame size 0, and no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
 420 
 421 // Used by gtest
 422 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
 423   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 424 
 425   BufferBlob* blob = nullptr;
 426   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
 427   assert(name != nullptr, "must provide a name");
 428   {
 429     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 430     blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
 431   }
 432   // Track memory usage statistic after releasing CodeCache_lock
 433   MemoryService::track_code_cache_memory_usage();
 434 
 435   return blob;
 436 }
 437 
// Placement allocator: carve 'size' bytes out of the non-nmethod code heap.
// The unused 's' is the compiler-supplied sizeof; the real size comes from caller.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
 441 
// Release a BufferBlob back to the code cache via the RuntimeBlob machinery.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
 445 
// Construct a BufferBlob with full frame information, forwarding everything
// to the RuntimeBlob constructor (used by subclasses that carry frames).
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, header_size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
 449 
 450 
 451 //----------------------------------------------------------------------------------------------------
 452 // Implementation of AdapterBlob
 453 
// Construct an AdapterBlob holding the i2c/c2i adapter code in 'cb', with
// frame/oop-map data. 'entry_offset' carries the offsets of each entry point
// within the code; the I2C entry must always be at offset 0 (asserted below).
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
#ifdef ASSERT
  assert(entry_offset[I2C] == 0, "sanity check");
  // Every remaining entry must lie within the instruction section, or be
  // unset (-1) — the latter only allowed from C2I_No_Clinit_Check onwards.
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  // Record the C2I entry offsets (including the inline-type variants) for
  // later entry-point computation.
  _c2i_offset = entry_offset[C2I];
  _c2i_inline_offset = entry_offset[C2I_Inline];
  _c2i_inline_ro_offset = entry_offset[C2I_Inline_RO];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_unverified_inline_offset = entry_offset[C2I_Unverified_Inline];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  // Commit this blob to the code cache.
  CodeCache::commit(this);
}
 474 
 475 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
 476   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 477 
 478   CodeCache::gc_on_allocation();
 479 
 480   AdapterBlob* blob = nullptr;
 481   unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
 482   {
 483     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 484     blob = new (size) AdapterBlob(size, cb, entry_offset, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
 485   }
 486   // Track memory usage statistic after releasing CodeCache_lock
 487   MemoryService::track_code_cache_memory_usage();
 488 
 489   return blob;
 490 }
 491 
 492 //----------------------------------------------------------------------------------------------------
 493 // Implementation of VtableBlob
 494 
// Placement allocator for VtableBlob. Returns nullptr on exhaustion instead
// of triggering the usual failure path (see comment below).
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
 504 

 545   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 546 
 547   MethodHandlesAdapterBlob* blob = nullptr;
 548   unsigned int size = sizeof(MethodHandlesAdapterBlob);
 549   // align the size to CodeEntryAlignment
 550   size = CodeBlob::align_code_offset(size);
 551   size += align_up(buffer_size, oopSize);
 552   {
 553     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 554     blob = new (size) MethodHandlesAdapterBlob(size);
 555     if (blob == nullptr) {
 556       vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
 557     }
 558   }
 559   // Track memory usage statistic after releasing CodeCache_lock
 560   MemoryService::track_code_cache_memory_usage();
 561 
 562   return blob;
 563 }
 564 
 565 //----------------------------------------------------------------------------------------------------
 566 // Implementation of BufferedInlineTypeBlob
// Construct a BufferedInlineTypeBlob recording the offsets of its pack/unpack
// entry points within the generated code.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  // Commit this blob to the code cache.
  CodeCache::commit(this);
}
 574 
 575 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
 576   ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock
 577 
 578   BufferedInlineTypeBlob* blob = nullptr;
 579   unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
 580   {
 581     MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
 582     blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
 583   }
 584   // Track memory usage statistic after releasing CodeCache_lock
 585   MemoryService::track_code_cache_memory_usage();
 586 
 587   return blob;
 588 }
 589 
 590 //----------------------------------------------------------------------------------------------------
 591 // Implementation of RuntimeStub
 592 
// Construct a RuntimeStub: a RuntimeBlob of kind RuntimeStub that simply
// forwards all frame/oop-map information to the RuntimeBlob constructor.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int         size,
  int16_t     frame_complete,
  int         frame_size,
  OopMapSet*  oop_maps,
  bool        caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
 606 
 607 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
 608                                            CodeBuffer* cb,
 609                                            int16_t frame_complete,
< prev index next >