46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
// The C2-only blobs are checked only when COMPILER2 is defined.
#ifdef COMPILER2
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
73
// Add proxy vtables.
// We only need a few for now - they are used only from prints.
// NOTE(review): subclasses without a definition here (e.g. AdapterBlob,
// SafepointBlob) presumably resolve _vpntr to the one inherited from their
// base class - confirm against the class declarations.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr UpcallStub::_vpntr;
85
86 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
87 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
88 nullptr/* None */,
89 &nmethod::_vpntr,
90 &BufferBlob::_vpntr,
91 &AdapterBlob::_vpntr,
92 &VtableBlob::_vpntr,
93 &MethodHandlesAdapterBlob::_vpntr,
94 &RuntimeStub::_vpntr,
95 &DeoptimizationBlob::_vpntr,
96 &SafepointBlob::_vpntr,
97 #ifdef COMPILER2
98 &ExceptionBlob::_vpntr,
99 &UncommonTrapBlob::_vpntr,
100 #endif
101 &UpcallStub::_vpntr
102 };
103
104 return array[(size_t)kind];
105 }
106
// Convenience overload: look up the proxy vtable for this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
110
111 unsigned int CodeBlob::align_code_offset(int offset) {
112 // align the size to CodeEntryAlignment
113 int header_size = (int)CodeHeap::header_size();
409 // Track memory usage statistic after releasing CodeCache_lock
410 MemoryService::track_code_cache_memory_usage();
411
412 return blob;
413 }
414
415
// Construct a BufferBlob over the given CodeBuffer. Buffer blobs have no
// frame (frame_never_safe, frame size 0) and carry no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
419
420 // Used by gtest
421 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
422 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
423
424 BufferBlob* blob = nullptr;
425 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
426 assert(name != nullptr, "must provide a name");
427 {
428 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
429 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
430 }
431 // Track memory usage statistic after releasing CodeCache_lock
432 MemoryService::track_code_cache_memory_usage();
433
434 return blob;
435 }
436
// Placement allocator: carves 'size' bytes out of the non-nmethod code heap.
// The C++ object size 's' is unused; callers pass the full blob size.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
440
// Release a buffer blob; delegates to RuntimeBlob's deallocation.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
444
445
//----------------------------------------------------------------------------------------------------
// Implementation of AdapterBlob

// Construct the I2C/C2I adapter blob and record the c2i entry offsets.
// entry_offset[0] must be 0 (the blob's first entry); each remaining slot is
// either an offset into the instruction section or -1 when unset.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob)) {
  assert(entry_offset[0] == 0, "sanity check");
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    assert((entry_offset[i] > 0 && entry_offset[i] < cb->insts()->size()) ||
           (entry_offset[i] == -1),
           "invalid entry offset[%d] = 0x%x", i, entry_offset[i]);
  }
  _c2i_offset = entry_offset[1];
  _c2i_unverified_offset = entry_offset[2];
  _c2i_no_clinit_check_offset = entry_offset[3];
  // Commit the finished blob to the code cache (see CodeCache::commit).
  CodeCache::commit(this);
}
463
464 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT]) {
465 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
466
467 CodeCache::gc_on_allocation();
468
469 AdapterBlob* blob = nullptr;
470 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
471 {
472 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
473 blob = new (size) AdapterBlob(size, cb, entry_offset);
474 }
475 // Track memory usage statistic after releasing CodeCache_lock
476 MemoryService::track_code_cache_memory_usage();
477
478 return blob;
479 }
480
// Export the recorded entry offsets in the same slot layout the constructor
// consumed (slot 0 is always 0).
void AdapterBlob::get_offsets(int entry_offset[ENTRY_COUNT]) {
  entry_offset[0] = 0;
  entry_offset[1] = _c2i_offset;
  entry_offset[2] = _c2i_unverified_offset;
  entry_offset[3] = _c2i_no_clinit_check_offset;
}
487
//----------------------------------------------------------------------------------------------------
// Implementation of VtableBlob

// Placement allocator for vtable blobs; 's' (the C++ object size) is unused.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
500
// A VtableBlob is a named BufferBlob of the Vtable kind without a CodeBuffer.
VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, CodeBlobKind::Vtable, size) {
}
504
505 VtableBlob* VtableBlob::create(const char* name, int buffer_size) {
542
543 MethodHandlesAdapterBlob* blob = nullptr;
544 unsigned int size = sizeof(MethodHandlesAdapterBlob);
545 // align the size to CodeEntryAlignment
546 size = CodeBlob::align_code_offset(size);
547 size += align_up(buffer_size, oopSize);
548 {
549 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
550 blob = new (size) MethodHandlesAdapterBlob(size);
551 if (blob == nullptr) {
552 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
553 }
554 }
555 // Track memory usage statistic after releasing CodeCache_lock
556 MemoryService::track_code_cache_memory_usage();
557
558 return blob;
559 }
560
//----------------------------------------------------------------------------------------------------
// Implementation of RuntimeStub

// Construct a runtime stub; all arguments are forwarded to RuntimeBlob with
// the RuntimeStub kind and header size.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
  )
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
577
578 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
579 CodeBuffer* cb,
580 int16_t frame_complete,
581 int frame_size,
|
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
// The C2-only blobs are checked only when COMPILER2 is defined.
#ifdef COMPILER2
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
74
// Add proxy vtables.
// We only need a few for now - they are used only from prints.
// NOTE(review): subclasses without a definition here (e.g. AdapterBlob,
// BufferedInlineTypeBlob) presumably resolve _vpntr to the one inherited
// from their base class - confirm against the class declarations.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr UpcallStub::_vpntr;
86
87 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
88 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
89 nullptr/* None */,
90 &nmethod::_vpntr,
91 &BufferBlob::_vpntr,
92 &AdapterBlob::_vpntr,
93 &VtableBlob::_vpntr,
94 &MethodHandlesAdapterBlob::_vpntr,
95 &BufferedInlineTypeBlob::_vpntr,
96 &RuntimeStub::_vpntr,
97 &DeoptimizationBlob::_vpntr,
98 &SafepointBlob::_vpntr,
99 #ifdef COMPILER2
100 &ExceptionBlob::_vpntr,
101 &UncommonTrapBlob::_vpntr,
102 #endif
103 &UpcallStub::_vpntr
104 };
105
106 return array[(size_t)kind];
107 }
108
// Convenience overload: look up the proxy vtable for this blob's own kind.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
112
113 unsigned int CodeBlob::align_code_offset(int offset) {
114 // align the size to CodeEntryAlignment
115 int header_size = (int)CodeHeap::header_size();
411 // Track memory usage statistic after releasing CodeCache_lock
412 MemoryService::track_code_cache_memory_usage();
413
414 return blob;
415 }
416
417
// Construct a BufferBlob over the given CodeBuffer. Buffer blobs have no
// frame (frame_never_safe, frame size 0) and carry no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
421
422 // Used by gtest
423 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
424 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
425
426 BufferBlob* blob = nullptr;
427 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
428 assert(name != nullptr, "must provide a name");
429 {
430 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
431 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
432 }
433 // Track memory usage statistic after releasing CodeCache_lock
434 MemoryService::track_code_cache_memory_usage();
435
436 return blob;
437 }
438
// Placement allocator: carves 'size' bytes out of the non-nmethod code heap.
// The C++ object size 's' is unused; callers pass the full blob size.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
442
// Release a buffer blob; delegates to RuntimeBlob's deallocation.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
446
// BufferBlob variant that carries frame information and oop maps; all
// arguments are forwarded unchanged to RuntimeBlob.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, header_size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
450
451
//----------------------------------------------------------------------------------------------------
// Implementation of AdapterBlob

// Construct the I2C/C2I adapter blob and record the c2i entry offsets.
// entry_offset[0] must be 0 (the blob's first entry); each remaining slot is
// either an offset into the instruction section or -1 when unset.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, sizeof(AdapterBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  assert(entry_offset[0] == 0, "sanity check");
  for (int i = 1; i < AdapterBlob::ENTRY_COUNT; i++) {
    // The entry is within the adapter blob or unset.
    assert((entry_offset[i] > 0 && entry_offset[i] < cb->insts()->size()) ||
           (entry_offset[i] == -1),
           "invalid entry offset[%d] = 0x%x", i, entry_offset[i]);
  }
  _c2i_offset = entry_offset[1];
  _c2i_inline_offset = entry_offset[2];
  _c2i_inline_ro_offset = entry_offset[3];
  _c2i_unverified_offset = entry_offset[4];
  _c2i_unverified_inline_offset = entry_offset[5];
  _c2i_no_clinit_check_offset = entry_offset[6];
  // Commit the finished blob to the code cache (see CodeCache::commit).
  CodeCache::commit(this);
}
472
473 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int entry_offset[AdapterBlob::ENTRY_COUNT], int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
474 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
475
476 CodeCache::gc_on_allocation();
477
478 AdapterBlob* blob = nullptr;
479 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
480 {
481 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
482 blob = new (size) AdapterBlob(size, cb, entry_offset, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
483 }
484 // Track memory usage statistic after releasing CodeCache_lock
485 MemoryService::track_code_cache_memory_usage();
486
487 return blob;
488 }
489
// Export the recorded entry offsets in the same slot layout the constructor
// consumed (slot 0 is always 0).
void AdapterBlob::get_offsets(int entry_offset[ENTRY_COUNT]) {
  entry_offset[0] = 0;
  entry_offset[1] = _c2i_offset;
  entry_offset[2] = _c2i_inline_offset;
  entry_offset[3] = _c2i_inline_ro_offset;
  entry_offset[4] = _c2i_unverified_offset;
  entry_offset[5] = _c2i_unverified_inline_offset;
  entry_offset[6] = _c2i_no_clinit_check_offset;
}
499
//----------------------------------------------------------------------------------------------------
// Implementation of VtableBlob

// Placement allocator for vtable blobs; 's' (the C++ object size) is unused.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
512
// A VtableBlob is a named BufferBlob of the Vtable kind without a CodeBuffer.
VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, CodeBlobKind::Vtable, size) {
}
516
517 VtableBlob* VtableBlob::create(const char* name, int buffer_size) {
554
555 MethodHandlesAdapterBlob* blob = nullptr;
556 unsigned int size = sizeof(MethodHandlesAdapterBlob);
557 // align the size to CodeEntryAlignment
558 size = CodeBlob::align_code_offset(size);
559 size += align_up(buffer_size, oopSize);
560 {
561 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
562 blob = new (size) MethodHandlesAdapterBlob(size);
563 if (blob == nullptr) {
564 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
565 }
566 }
567 // Track memory usage statistic after releasing CodeCache_lock
568 MemoryService::track_code_cache_memory_usage();
569
570 return blob;
571 }
572
//----------------------------------------------------------------------------------------------------
// Implementation of BufferedInlineTypeBlob

// Construct the blob for a buffered inline type. The three offsets are
// recorded verbatim for later lookup; presumably they locate the pack/unpack
// entry points within the blob - confirm against the accessors in the header.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  // Commit the finished blob to the code cache (see CodeCache::commit).
  CodeCache::commit(this);
}
582
583 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
584 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
585
586 BufferedInlineTypeBlob* blob = nullptr;
587 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
588 {
589 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
590 blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
591 }
592 // Track memory usage statistic after releasing CodeCache_lock
593 MemoryService::track_code_cache_memory_usage();
594
595 return blob;
596 }
597
//----------------------------------------------------------------------------------------------------
// Implementation of RuntimeStub

// Construct a runtime stub; all arguments are forwarded to RuntimeBlob with
// the RuntimeStub kind and header size.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
  )
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
614
615 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
616 CodeBuffer* cb,
617 int16_t frame_complete,
618 int frame_size,
|