46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// Note: keep these checks in sync with the kinds dispatched in CodeBlob::vptr().

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// ExceptionBlob and UncommonTrapBlob exist only when C2 is built.
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
73
// Add proxy vtables.
// We need only a few for now - they are used only from print routines.
// Subclasses referenced in CodeBlob::vptr() that have no definition here
// (e.g. AdapterBlob) presumably reuse the Vptr inherited from their base.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
const UpcallStub::Vptr UpcallStub::_vpntr;
82
83 const CodeBlob::Vptr* CodeBlob::vptr() const {
84 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
85 nullptr/* None */,
86 &nmethod::_vpntr,
87 &BufferBlob::_vpntr,
88 &AdapterBlob::_vpntr,
89 &VtableBlob::_vpntr,
90 &MethodHandlesAdapterBlob::_vpntr,
91 &RuntimeStub::_vpntr,
92 &DeoptimizationBlob::_vpntr,
93 &SafepointBlob::_vpntr,
94 #ifdef COMPILER2
95 &ExceptionBlob::_vpntr,
96 &UncommonTrapBlob::_vpntr,
97 #endif
98 &UpcallStub::_vpntr
99 };
100
101 return array[(size_t)_kind];
102 }
103
104 unsigned int CodeBlob::align_code_offset(int offset) {
105 // align the size to CodeEntryAlignment
106 int header_size = (int)CodeHeap::header_size();
107 return align_up(offset + header_size, CodeEntryAlignment) - header_size;
108 }
109
110 // This must be consistent with the CodeBlob constructor's layout actions.
276 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
277 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
278
279 BufferBlob* blob = nullptr;
280 unsigned int size = sizeof(BufferBlob);
281 // align the size to CodeEntryAlignment
282 size = CodeBlob::align_code_offset(size);
283 size += align_up(buffer_size, oopSize);
284 assert(name != nullptr, "must provide a name");
285 {
286 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
287 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
288 }
289 // Track memory usage statistic after releasing CodeCache_lock
290 MemoryService::track_code_cache_memory_usage();
291
292 return blob;
293 }
294
295
// Construct a BufferBlob over an assembled CodeBuffer. Buffer blobs have no
// usable frame (frame_never_safe, frame size 0) and carry no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size)
  : RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), CodeOffsets::frame_never_safe, 0, nullptr)
{}
299
300 // Used by gtest
301 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
302 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
303
304 BufferBlob* blob = nullptr;
305 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
306 assert(name != nullptr, "must provide a name");
307 {
308 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
309 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
310 }
311 // Track memory usage statistic after releasing CodeCache_lock
312 MemoryService::track_code_cache_memory_usage();
313
314 return blob;
315 }
316
// Placement allocator: BufferBlobs live in the non-nmethod code heap.
// 's' (the C++ object size) is unused; 'size' is the full blob allocation
// size passed via placement new.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
320
// Release the blob's code cache storage; delegates to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
324
325
326 //----------------------------------------------------------------------------------------------------
327 // Implementation of AdapterBlob
328
// I2C/C2I adapter blob: a BufferBlob of kind Adapter that is committed to the
// code cache as part of construction.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size) {
  CodeCache::commit(this);
}
333
334 AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
335 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
336
337 CodeCache::gc_on_allocation();
338
339 AdapterBlob* blob = nullptr;
340 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
341 {
342 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
343 blob = new (size) AdapterBlob(size, cb);
344 }
345 // Track memory usage statistic after releasing CodeCache_lock
346 MemoryService::track_code_cache_memory_usage();
347
348 return blob;
349 }
350
351 //----------------------------------------------------------------------------------------------------
352 // Implementation of VtableBlob
353
// Placement allocator for VtableBlob; 's' (the C++ object size) is unused,
// 'size' is the full blob allocation size passed via placement new.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
363
401 // Implementation of MethodHandlesAdapterBlob
402
403 MethodHandlesAdapterBlob* MethodHandlesAdapterBlob::create(int buffer_size) {
404 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
405
406 MethodHandlesAdapterBlob* blob = nullptr;
407 unsigned int size = sizeof(MethodHandlesAdapterBlob);
408 // align the size to CodeEntryAlignment
409 size = CodeBlob::align_code_offset(size);
410 size += align_up(buffer_size, oopSize);
411 {
412 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
413 blob = new (size) MethodHandlesAdapterBlob(size);
414 if (blob == nullptr) {
415 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
416 }
417 }
418 // Track memory usage statistic after releasing CodeCache_lock
419 MemoryService::track_code_cache_memory_usage();
420
421 return blob;
422 }
423
424 //----------------------------------------------------------------------------------------------------
425 // Implementation of RuntimeStub
426
// Construct a RuntimeStub over an assembled CodeBuffer; the frame layout and
// oop map information is forwarded unchanged to the RuntimeBlob base.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
440
|
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// Note: keep these checks in sync with the kinds dispatched in CodeBlob::vptr().

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
#ifdef COMPILER2
// ExceptionBlob and UncommonTrapBlob exist only when C2 is built.
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
74
// Add proxy vtables.
// We need only a few for now - they are used only from print routines.
// Subclasses referenced in CodeBlob::vptr() that have no definition here
// (e.g. AdapterBlob) presumably reuse the Vptr inherited from their base.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
const UpcallStub::Vptr UpcallStub::_vpntr;
83
84 const CodeBlob::Vptr* CodeBlob::vptr() const {
85 constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
86 nullptr/* None */,
87 &nmethod::_vpntr,
88 &BufferBlob::_vpntr,
89 &AdapterBlob::_vpntr,
90 &VtableBlob::_vpntr,
91 &MethodHandlesAdapterBlob::_vpntr,
92 &BufferedInlineTypeBlob::_vpntr,
93 &RuntimeStub::_vpntr,
94 &DeoptimizationBlob::_vpntr,
95 &SafepointBlob::_vpntr,
96 #ifdef COMPILER2
97 &ExceptionBlob::_vpntr,
98 &UncommonTrapBlob::_vpntr,
99 #endif
100 &UpcallStub::_vpntr
101 };
102
103 return array[(size_t)_kind];
104 }
105
106 unsigned int CodeBlob::align_code_offset(int offset) {
107 // align the size to CodeEntryAlignment
108 int header_size = (int)CodeHeap::header_size();
109 return align_up(offset + header_size, CodeEntryAlignment) - header_size;
110 }
111
112 // This must be consistent with the CodeBlob constructor's layout actions.
278 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
279 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
280
281 BufferBlob* blob = nullptr;
282 unsigned int size = sizeof(BufferBlob);
283 // align the size to CodeEntryAlignment
284 size = CodeBlob::align_code_offset(size);
285 size += align_up(buffer_size, oopSize);
286 assert(name != nullptr, "must provide a name");
287 {
288 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
289 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
290 }
291 // Track memory usage statistic after releasing CodeCache_lock
292 MemoryService::track_code_cache_memory_usage();
293
294 return blob;
295 }
296
297
// Construct a BufferBlob over an assembled CodeBuffer. 'header_size' lets
// subclasses with larger headers (e.g. BufferedInlineTypeBlob) reuse this
// constructor. Buffer blobs have no usable frame (frame_never_safe, frame
// size 0) and carry no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size)
  : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
301
302 // Used by gtest
303 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
304 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
305
306 BufferBlob* blob = nullptr;
307 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
308 assert(name != nullptr, "must provide a name");
309 {
310 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
311 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
312 }
313 // Track memory usage statistic after releasing CodeCache_lock
314 MemoryService::track_code_cache_memory_usage();
315
316 return blob;
317 }
318
// Placement allocator: BufferBlobs live in the non-nmethod code heap.
// 's' (the C++ object size) is unused; 'size' is the full blob allocation
// size passed via placement new.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
322
// Release the blob's code cache storage; delegates to RuntimeBlob::free.
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
326
// Variant for buffer blobs that do carry a real frame and oop maps (used by
// the AdapterBlob constructor below); forwards the frame layout information
// to RuntimeBlob.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
330
331
332 //----------------------------------------------------------------------------------------------------
333 // Implementation of AdapterBlob
334
// I2C/C2I adapter blob: forwards the frame layout and oop map information to
// the frame-carrying BufferBlob constructor and commits the blob to the code
// cache as part of construction.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  CodeCache::commit(this);
}
339
340 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
341 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
342
343 CodeCache::gc_on_allocation();
344
345 AdapterBlob* blob = nullptr;
346 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
347 {
348 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
349 blob = new (size) AdapterBlob(size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
350 }
351 // Track memory usage statistic after releasing CodeCache_lock
352 MemoryService::track_code_cache_memory_usage();
353
354 return blob;
355 }
356
357 //----------------------------------------------------------------------------------------------------
358 // Implementation of VtableBlob
359
// Placement allocator for VtableBlob; 's' (the C++ object size) is unused,
// 'size' is the full blob allocation size passed via placement new.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
369
407 // Implementation of MethodHandlesAdapterBlob
408
409 MethodHandlesAdapterBlob* MethodHandlesAdapterBlob::create(int buffer_size) {
410 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
411
412 MethodHandlesAdapterBlob* blob = nullptr;
413 unsigned int size = sizeof(MethodHandlesAdapterBlob);
414 // align the size to CodeEntryAlignment
415 size = CodeBlob::align_code_offset(size);
416 size += align_up(buffer_size, oopSize);
417 {
418 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
419 blob = new (size) MethodHandlesAdapterBlob(size);
420 if (blob == nullptr) {
421 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
422 }
423 }
424 // Track memory usage statistic after releasing CodeCache_lock
425 MemoryService::track_code_cache_memory_usage();
426
427 return blob;
428 }
429
430 //----------------------------------------------------------------------------------------------------
431 // Implementation of BufferedInlineTypeBlob
// A BufferedInlineTypeBlob records the entry offsets (within the blob's code)
// of its pack_fields, pack_fields_jobject and unpack_fields routines, and is
// committed to the code cache as part of construction.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);
}
439
440 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
441 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
442
443 BufferedInlineTypeBlob* blob = nullptr;
444 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
445 {
446 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
447 blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
448 }
449 // Track memory usage statistic after releasing CodeCache_lock
450 MemoryService::track_code_cache_memory_usage();
451
452 return blob;
453 }
454
455 //----------------------------------------------------------------------------------------------------
456 // Implementation of RuntimeStub
457
// Construct a RuntimeStub over an assembled CodeBuffer; the frame layout and
// oop map information is forwarded unchanged to the RuntimeBlob base.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
471
|