46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// Each assert fails at compile time if someone adds a virtual method (and
// therefore a vtable pointer) to one of these blob types.

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds exist only when the C2 compiler is built in.
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
73
// Add proxy vtables.
// We need only a few for now - they are used only from prints.
// Blob types without a definition here presumably reuse a Vptr inherited
// from a base class (e.g. the BufferBlob subclasses) - confirm in the header.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr UpcallStub::_vpntr;
85
86 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
87 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
88 nullptr/* None */,
89 &nmethod::_vpntr,
90 &BufferBlob::_vpntr,
91 &AdapterBlob::_vpntr,
92 &VtableBlob::_vpntr,
93 &MethodHandlesAdapterBlob::_vpntr,
94 &RuntimeStub::_vpntr,
95 &DeoptimizationBlob::_vpntr,
96 &SafepointBlob::_vpntr,
97 #ifdef COMPILER2
98 &ExceptionBlob::_vpntr,
99 &UncommonTrapBlob::_vpntr,
100 #endif
101 &UpcallStub::_vpntr
102 };
103
104 return array[(size_t)kind];
105 }
106
// Return the proxy vtable for this blob, selected by its kind tag.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
110
111 unsigned int CodeBlob::align_code_offset(int offset) {
112 // align the size to CodeEntryAlignment
113 int header_size = (int)CodeHeap::header_size();
392 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
393 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
394
395 BufferBlob* blob = nullptr;
396 unsigned int size = sizeof(BufferBlob);
397 // align the size to CodeEntryAlignment
398 size = CodeBlob::align_code_offset(size);
399 size += align_up(buffer_size, oopSize);
400 assert(name != nullptr, "must provide a name");
401 {
402 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
403 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
404 }
405 // Track memory usage statistic after releasing CodeCache_lock
406 MemoryService::track_code_cache_memory_usage();
407
408 return blob;
409 }
410
411
// Construct a BufferBlob over an existing CodeBuffer. Passes
// frame_never_safe, frame size 0 and a null oop map set to RuntimeBlob,
// i.e. buffer blobs carry no frame/GC information.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size)
  : RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), CodeOffsets::frame_never_safe, 0, nullptr)
{}
415
416 // Used by gtest
417 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
418 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
419
420 BufferBlob* blob = nullptr;
421 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
422 assert(name != nullptr, "must provide a name");
423 {
424 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
425 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
426 }
427 // Track memory usage statistic after releasing CodeCache_lock
428 MemoryService::track_code_cache_memory_usage();
429
430 return blob;
431 }
432
// Placement allocator: carves 'size' bytes out of the non-nmethod code heap.
// The implicit sizeof argument 's' is unused - callers pass the full blob
// size (header + code) explicitly.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
436
// Release the blob's code cache storage; delegates to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
440
441
442 //----------------------------------------------------------------------------------------------------
443 // Implementation of AdapterBlob
444
// Construct an adapter blob ("I2C/C2I adapters") and immediately commit it
// to the code cache (see CodeCache::commit for what commit entails).
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size) {
  CodeCache::commit(this);
}
449
450 AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
451 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
452
453 CodeCache::gc_on_allocation();
454
455 AdapterBlob* blob = nullptr;
456 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
457 {
458 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
459 blob = new (size) AdapterBlob(size, cb);
460 }
461 // Track memory usage statistic after releasing CodeCache_lock
462 MemoryService::track_code_cache_memory_usage();
463
464 return blob;
465 }
466
467 //----------------------------------------------------------------------------------------------------
468 // Implementation of VtableBlob
469
// Non-failing allocation variant: returns nullptr instead of triggering the
// usual code-cache-full handling, because that handling is unsafe here.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
479
521
522 MethodHandlesAdapterBlob* blob = nullptr;
523 unsigned int size = sizeof(MethodHandlesAdapterBlob);
524 // align the size to CodeEntryAlignment
525 size = CodeBlob::align_code_offset(size);
526 size += align_up(buffer_size, oopSize);
527 {
528 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
529 blob = new (size) MethodHandlesAdapterBlob(size);
530 if (blob == nullptr) {
531 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
532 }
533 }
534 // Track memory usage statistic after releasing CodeCache_lock
535 MemoryService::track_code_cache_memory_usage();
536
537 return blob;
538 }
539
540 //----------------------------------------------------------------------------------------------------
541 // Implementation of RuntimeStub
542
// Construct a RuntimeStub over the code in 'cb'.
//   name            - human-readable stub name (used in prints)
//   size            - total blob size in bytes
//   frame_complete  - code offset at which the stub frame is fully set up
//   frame_size      - stub frame size (units per RuntimeBlob - confirm there)
//   oop_maps        - GC oop maps for the stub, may be null
//   caller_must_gc_arguments - forwarded to RuntimeBlob
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
556
557 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
558 CodeBuffer* cb,
559 int16_t frame_complete,
560 int frame_size,
|
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// Each assert fails at compile time if someone adds a virtual method (and
// therefore a vtable pointer) to one of these blob types.

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// These blob kinds exist only when the C2 compiler is built in.
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
74
// Add proxy vtables.
// We need only a few for now - they are used only from prints.
// Blob types without a definition here presumably reuse a Vptr inherited
// from a base class (e.g. the BufferBlob subclasses) - confirm in the header.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
#ifdef COMPILER2
const ExceptionBlob::Vptr ExceptionBlob::_vpntr;
#endif // COMPILER2
const UpcallStub::Vptr UpcallStub::_vpntr;
86
87 const CodeBlob::Vptr* CodeBlob::vptr(CodeBlobKind kind) {
88 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
89 nullptr/* None */,
90 &nmethod::_vpntr,
91 &BufferBlob::_vpntr,
92 &AdapterBlob::_vpntr,
93 &VtableBlob::_vpntr,
94 &MethodHandlesAdapterBlob::_vpntr,
95 &BufferedInlineTypeBlob::_vpntr,
96 &RuntimeStub::_vpntr,
97 &DeoptimizationBlob::_vpntr,
98 &SafepointBlob::_vpntr,
99 #ifdef COMPILER2
100 &ExceptionBlob::_vpntr,
101 &UncommonTrapBlob::_vpntr,
102 #endif
103 &UpcallStub::_vpntr
104 };
105
106 return array[(size_t)kind];
107 }
108
// Return the proxy vtable for this blob, selected by its kind tag.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  return vptr(_kind);
}
112
113 unsigned int CodeBlob::align_code_offset(int offset) {
114 // align the size to CodeEntryAlignment
115 int header_size = (int)CodeHeap::header_size();
394 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
395 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
396
397 BufferBlob* blob = nullptr;
398 unsigned int size = sizeof(BufferBlob);
399 // align the size to CodeEntryAlignment
400 size = CodeBlob::align_code_offset(size);
401 size += align_up(buffer_size, oopSize);
402 assert(name != nullptr, "must provide a name");
403 {
404 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
405 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
406 }
407 // Track memory usage statistic after releasing CodeCache_lock
408 MemoryService::track_code_cache_memory_usage();
409
410 return blob;
411 }
412
413
// Construct a BufferBlob over an existing CodeBuffer. 'header_size' lets
// subclasses with larger headers reuse this path. Passes frame_never_safe,
// frame size 0 and a null oop map set, i.e. no frame/GC information.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
417
418 // Used by gtest
419 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
420 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
421
422 BufferBlob* blob = nullptr;
423 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
424 assert(name != nullptr, "must provide a name");
425 {
426 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
427 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
428 }
429 // Track memory usage statistic after releasing CodeCache_lock
430 MemoryService::track_code_cache_memory_usage();
431
432 return blob;
433 }
434
// Placement allocator: carves 'size' bytes out of the non-nmethod code heap.
// The implicit sizeof argument 's' is unused - callers pass the full blob
// size (header + code) explicitly.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
438
// Release the blob's code cache storage; delegates to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
442
// Frame-carrying BufferBlob variant: unlike the plain CodeBuffer ctor this
// one forwards explicit frame_complete/frame_size/oop_maps data to
// RuntimeBlob (used by subclasses such as AdapterBlob below).
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
446
447
448 //----------------------------------------------------------------------------------------------------
449 // Implementation of AdapterBlob
450
// Construct an adapter blob ("I2C/C2I adapters") carrying frame and oop map
// information, then immediately commit it to the code cache.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  CodeCache::commit(this);
}
455
456 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
457 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
458
459 CodeCache::gc_on_allocation();
460
461 AdapterBlob* blob = nullptr;
462 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
463 {
464 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
465 blob = new (size) AdapterBlob(size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
466 }
467 // Track memory usage statistic after releasing CodeCache_lock
468 MemoryService::track_code_cache_memory_usage();
469
470 return blob;
471 }
472
473 //----------------------------------------------------------------------------------------------------
474 // Implementation of VtableBlob
475
// Non-failing allocation variant: returns nullptr instead of triggering the
// usual code-cache-full handling, because that handling is unsafe here.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
485
527
528 MethodHandlesAdapterBlob* blob = nullptr;
529 unsigned int size = sizeof(MethodHandlesAdapterBlob);
530 // align the size to CodeEntryAlignment
531 size = CodeBlob::align_code_offset(size);
532 size += align_up(buffer_size, oopSize);
533 {
534 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
535 blob = new (size) MethodHandlesAdapterBlob(size);
536 if (blob == nullptr) {
537 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
538 }
539 }
540 // Track memory usage statistic after releasing CodeCache_lock
541 MemoryService::track_code_cache_memory_usage();
542
543 return blob;
544 }
545
546 //----------------------------------------------------------------------------------------------------
547 // Implementation of BufferedInlineTypeBlob
// Construct a buffered-inline-type blob. The three offsets presumably locate
// the pack/unpack entry points within the blob's code - confirm against the
// callers of pack_fields/unpack_fields. The blob is committed to the code
// cache immediately on construction.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);
}
555
556 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
557 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
558
559 BufferedInlineTypeBlob* blob = nullptr;
560 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
561 {
562 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
563 blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
564 }
565 // Track memory usage statistic after releasing CodeCache_lock
566 MemoryService::track_code_cache_memory_usage();
567
568 return blob;
569 }
570
571 //----------------------------------------------------------------------------------------------------
572 // Implementation of RuntimeStub
573
// Construct a RuntimeStub over the code in 'cb'.
//   name            - human-readable stub name (used in prints)
//   size            - total blob size in bytes
//   frame_complete  - code offset at which the stub frame is fully set up
//   frame_size      - stub frame size (units per RuntimeBlob - confirm there)
//   oop_maps        - GC oop maps for the stub, may be null
//   caller_must_gc_arguments - forwarded to RuntimeBlob
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
587
588 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
589 CodeBuffer* cb,
590 int16_t frame_complete,
591 int frame_size,
|