#include "runtime/sharedRuntime.hpp"
#include "runtime/stubCodeGenerator.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vframe.hpp"
#include "services/memoryService.hpp"
#include "utilities/align.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

#include <type_traits>

// Virtual methods are not allowed in code blobs to simplify caching of compiled code.
// Check all "leaf" subclasses of the CodeBlob class.

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
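
// For illustration only (not part of the build): std::is_polymorphic is true
// for any class that declares or inherits a virtual function, i.e. any class
// carrying a vtable pointer, which is exactly what the checks above rule out:
//
//   struct Plain          { void f();         };  // is_polymorphic == false
//   struct Virt           { virtual void f(); };  // is_polymorphic == true
//   struct Derived : Virt { void f();         };  // is_polymorphic == true (f() overrides a virtual)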

// Add proxy vtables.
// We need only a few for now; they are used only for printing.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr UpcallStub::_vpntr;

const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr /* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &BufferedInlineTypeBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)kind];
}

const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
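
// A dispatch sketch (hypothetical call site; the real callers live in the
// print/debug paths): instead of a C++ virtual call, a CodeBlob method looks
// up the proxy vtable for its kind and forwards explicitly, e.g.
//
//   void CodeBlob::print_on(outputStream* st) const {
//     vptr()->print_on(this, st);  // manual dispatch through the Vptr proxy
//   }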

unsigned int CodeBlob::align_code_offset(int offset) {
  // align the size to CodeEntryAlignment
  int header_size = (int)CodeHeap::header_size();
  // ...

  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}


BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}

// Used by gtest
BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  BufferBlob* blob = nullptr;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
  assert(name != nullptr, "must provide a name");
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
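
// A gtest-style usage sketch (hypothetical test; the buffer size and names
// are illustrative, not taken from an actual test):
//
//   ResourceMark rm;
//   CodeBuffer cb("test-buffer", 256, 0);
//   BufferBlob* blob = BufferBlob::create("gtest blob", &cb);
//   ASSERT_NE(blob, nullptr);
//   BufferBlob::free(blob);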

void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
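
// Note on the allocation pattern: this is a placement-style operator new, so
// `new (size) BufferBlob(...)` allocates `size` bytes directly in the code
// cache and then runs the constructor in place. Because the operator is
// declared throw(), a failed allocation yields nullptr instead of an
// exception, and the constructor is simply not run.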

void BufferBlob::free(BufferBlob* blob) {
  RuntimeBlob::free(blob);
}

BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
                       int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, header_size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}


//----------------------------------------------------------------------------------------------------
// Implementation of AdapterBlob

AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT],
                         int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob),
             frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
#ifdef ASSERT
  assert(entry_offset[I2C] == 0, "sanity check");
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // Each entry is either within the adapter blob or unset.
    int offset = entry_offset[i];
    assert((offset > 0 && offset < cb->insts()->size()) ||
           (i >= C2I_No_Clinit_Check && offset == -1),
           "invalid entry offset[%d] = 0x%x", i, offset);
  }
#endif // ASSERT
  _c2i_offset = entry_offset[C2I];
  _c2i_inline_offset = entry_offset[C2I_Inline];
  _c2i_inline_ro_offset = entry_offset[C2I_Inline_RO];
  _c2i_unverified_offset = entry_offset[C2I_Unverified];
  _c2i_unverified_inline_offset = entry_offset[C2I_Unverified_Inline];
  _c2i_no_clinit_check_offset = entry_offset[C2I_No_Clinit_Check];
  CodeCache::commit(this);
}
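
// A sketch of how these offsets are consumed (hypothetical accessors; the
// real declarations live in the AdapterBlob header): each entry point is an
// address relative to the blob's code start, with -1 marking an unset entry,
// e.g.
//
//   address AdapterBlob::c2i_entry() const {
//     return code_begin() + _c2i_offset;
//   }
//   address AdapterBlob::c2i_no_clinit_check_entry() const {
//     return (_c2i_no_clinit_check_offset == -1) ? nullptr
//                                                : code_begin() + _c2i_no_clinit_check_offset;
//   }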

AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT],
                                 int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  CodeCache::gc_on_allocation();

  AdapterBlob* blob = nullptr;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) AdapterBlob(size, cb, entry_offset, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}

//----------------------------------------------------------------------------------------------------
// Implementation of VtableBlob

void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
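
// Because allocation failure is not handled here, `new (size) VtableBlob(...)`
// can yield nullptr and the caller must check for it. A minimal sketch
// (hypothetical; the real caller is elided from this excerpt):
//
//   VtableBlob* blob = new (size) VtableBlob(name, size);
//   if (blob == nullptr) {
//     return nullptr;  // defer code cache exhaustion handling to a later allocation
//   }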

// ...

MethodHandlesAdapterBlob* MethodHandlesAdapterBlob::create(int buffer_size) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  MethodHandlesAdapterBlob* blob = nullptr;
  unsigned int size = sizeof(MethodHandlesAdapterBlob);
  // align the size to CodeEntryAlignment
  size = CodeBlob::align_code_offset(size);
  size += align_up(buffer_size, oopSize);
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) MethodHandlesAdapterBlob(size);
    if (blob == nullptr) {
      vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
    }
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
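
// A worked sizing example (illustrative numbers only; real values depend on
// the platform's alignment and header layout): align_up rounds a value up to
// the next multiple of the alignment, so with oopSize == 8,
//
//   align_up(13, 8) == 16   // a 13-byte buffer is padded to 16 bytes
//   align_up(16, 8) == 16   // an already-aligned size is unchanged
//
// The blob thus occupies the aligned header followed by the aligned buffer.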

//----------------------------------------------------------------------------------------------------
// Implementation of BufferedInlineTypeBlob

BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off,
                                               int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);
}

BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off,
                                                       int pack_fields_jobject_off, int unpack_fields_off) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  BufferedInlineTypeBlob* blob = nullptr;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
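
// As with AdapterBlob, the stored offsets translate into entry addresses
// relative to the blob's code start. A sketch (hypothetical accessor names):
//
//   address BufferedInlineTypeBlob::pack_fields() const {
//     return code_begin() + _pack_fields_off;
//   }
//   address BufferedInlineTypeBlob::unpack_fields() const {
//     return code_begin() + _unpack_fields_off;
//   }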

//----------------------------------------------------------------------------------------------------
// Implementation of RuntimeStub

RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
  : RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
                frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}

RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
                                           CodeBuffer* cb,
                                           int16_t frame_complete,