252 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
253 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
254
255 BufferBlob* blob = nullptr;
256 unsigned int size = sizeof(BufferBlob);
257 // align the size to CodeEntryAlignment
258 size = CodeBlob::align_code_offset(size);
259 size += align_up(buffer_size, oopSize);
260 assert(name != nullptr, "must provide a name");
261 {
262 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
263 blob = new (size) BufferBlob(name, size);
264 }
265 // Track memory usage statistic after releasing CodeCache_lock
266 MemoryService::track_code_cache_memory_usage();
267
268 return blob;
269 }
270
271
// Constructs a BufferBlob backed by the code in 'cb'. Buffer blobs have no
// frame (CodeOffsets::frame_never_safe, frame size 0) and carry no oop maps.
BufferBlob::BufferBlob(const char* name, int size, CodeBuffer* cb)
  : RuntimeBlob(name, cb, sizeof(BufferBlob), size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
275
276 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
277 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
278
279 BufferBlob* blob = nullptr;
280 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
281 assert(name != nullptr, "must provide a name");
282 {
283 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
284 blob = new (size) BufferBlob(name, size, cb);
285 }
286 // Track memory usage statistic after releasing CodeCache_lock
287 MemoryService::track_code_cache_memory_usage();
288
289 return blob;
290 }
291
// Placement new: buffer blobs live in the non-nmethod section of the code
// cache. 's' (the compiler-supplied object size) is ignored; 'size' is the
// full allocation size computed by the create() factories.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
295
// Frees 'blob' by delegating to RuntimeBlob::free (which returns the storage
// to the code cache).
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
299
300
301 //----------------------------------------------------------------------------------------------------
302 // Implementation of AdapterBlob
303
// Constructs the blob holding the i2c/c2i adapter code from 'cb' and then
// commits it in the code cache.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb) :
  BufferBlob("I2C/C2I adapters", size, cb) {
  // Commit this blob in the code cache.
  CodeCache::commit(this);
}
308
309 AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
310 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
311
312 CodeCache::gc_on_allocation();
313
314 AdapterBlob* blob = nullptr;
315 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
316 {
317 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
318 blob = new (size) AdapterBlob(size, cb);
319 }
320 // Track memory usage statistic after releasing CodeCache_lock
321 MemoryService::track_code_cache_memory_usage();
322
323 return blob;
324 }
325
// Placement new for VtableBlob. Unlike BufferBlob::operator new, allocation
// failure is NOT handled here, so this may return nullptr.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
335
// Constructs a VtableBlob; all layout work is done by the BufferBlob base.
VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, size) {
}
376 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
377
378 MethodHandlesAdapterBlob* blob = nullptr;
379 unsigned int size = sizeof(MethodHandlesAdapterBlob);
380 // align the size to CodeEntryAlignment
381 size = CodeBlob::align_code_offset(size);
382 size += align_up(buffer_size, oopSize);
383 {
384 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
385 blob = new (size) MethodHandlesAdapterBlob(size);
386 if (blob == nullptr) {
387 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
388 }
389 }
390 // Track memory usage statistic after releasing CodeCache_lock
391 MemoryService::track_code_cache_memory_usage();
392
393 return blob;
394 }
395
396 //----------------------------------------------------------------------------------------------------
397 // Implementation of RuntimeStub
398
// Constructs a RuntimeStub over the code in 'cb'.
// 'frame_complete' / 'frame_size' / 'oop_maps' describe the stub's frame for
// stack walking; 'caller_must_gc_arguments' is forwarded to RuntimeBlob.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, cb, sizeof(RuntimeStub), size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
411
412 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
413 CodeBuffer* cb,
414 int frame_complete,
415 int frame_size,
|
252 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
253 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
254
255 BufferBlob* blob = nullptr;
256 unsigned int size = sizeof(BufferBlob);
257 // align the size to CodeEntryAlignment
258 size = CodeBlob::align_code_offset(size);
259 size += align_up(buffer_size, oopSize);
260 assert(name != nullptr, "must provide a name");
261 {
262 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
263 blob = new (size) BufferBlob(name, size);
264 }
265 // Track memory usage statistic after releasing CodeCache_lock
266 MemoryService::track_code_cache_memory_usage();
267
268 return blob;
269 }
270
271
// Constructs a BufferBlob backed by the code in 'cb'. 'header_size' lets
// subclasses with a larger header (e.g. BufferedInlineTypeBlob) account for
// it in the layout. Buffer blobs have no frame and no oop maps.
BufferBlob::BufferBlob(const char* name, int header_size, int size, CodeBuffer* cb)
  : RuntimeBlob(name, cb, header_size, size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
275
276 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
277 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
278
279 BufferBlob* blob = nullptr;
280 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
281 assert(name != nullptr, "must provide a name");
282 {
283 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
284 blob = new (size) BufferBlob(name, sizeof(BufferBlob), size, cb);
285 }
286 // Track memory usage statistic after releasing CodeCache_lock
287 MemoryService::track_code_cache_memory_usage();
288
289 return blob;
290 }
291
// Placement new: buffer blobs are allocated from the non-nmethod section of
// the code cache. 's' (compiler-supplied object size) is ignored; 'size' is
// the full allocation size computed by the create() factories.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
295
// Frees 'blob' by delegating to RuntimeBlob::free (which returns the storage
// to the code cache).
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
299
// Constructs a BufferBlob that, unlike the other BufferBlob constructors,
// carries real frame information ('frame_complete', 'frame_size', 'oop_maps')
// so the blob can participate in stack walking.
BufferBlob::BufferBlob(const char* name, int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, cb, sizeof(BufferBlob), size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
303
304
305 //----------------------------------------------------------------------------------------------------
306 // Implementation of AdapterBlob
307
// Constructs the blob holding the i2c/c2i adapter code from 'cb', including
// frame/oop-map information, then commits it in the code cache.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  // Commit this blob in the code cache.
  CodeCache::commit(this);
}
312
313 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
314 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
315
316 CodeCache::gc_on_allocation();
317
318 AdapterBlob* blob = nullptr;
319 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
320 {
321 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
322 blob = new (size) AdapterBlob(size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
323 }
324 // Track memory usage statistic after releasing CodeCache_lock
325 MemoryService::track_code_cache_memory_usage();
326
327 return blob;
328 }
329
// Placement new for VtableBlob. Unlike BufferBlob::operator new, allocation
// failure is NOT handled here, so this may return nullptr.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
339
// Constructs a VtableBlob; all layout work is done by the BufferBlob base.
VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, size) {
}
380 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
381
382 MethodHandlesAdapterBlob* blob = nullptr;
383 unsigned int size = sizeof(MethodHandlesAdapterBlob);
384 // align the size to CodeEntryAlignment
385 size = CodeBlob::align_code_offset(size);
386 size += align_up(buffer_size, oopSize);
387 {
388 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
389 blob = new (size) MethodHandlesAdapterBlob(size);
390 if (blob == nullptr) {
391 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
392 }
393 }
394 // Track memory usage statistic after releasing CodeCache_lock
395 MemoryService::track_code_cache_memory_usage();
396
397 return blob;
398 }
399
400 //----------------------------------------------------------------------------------------------------
401 // Implementation of BufferedInlineTypeBlob
// Constructs the blob holding pack/unpack stubs for buffered inline types.
// The three offsets locate the respective entry points inside the code (per
// the field names; exact stub semantics are defined by the code generator).
// Note that sizeof(BufferedInlineTypeBlob) is passed as the header size since
// this subclass is larger than a plain BufferBlob.
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", sizeof(BufferedInlineTypeBlob), size, cb),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  // Commit this blob in the code cache.
  CodeCache::commit(this);
}
409
410 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
411 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
412
413 BufferedInlineTypeBlob* blob = nullptr;
414 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
415 {
416 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
417 blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
418 }
419 // Track memory usage statistic after releasing CodeCache_lock
420 MemoryService::track_code_cache_memory_usage();
421
422 return blob;
423 }
424
425 //----------------------------------------------------------------------------------------------------
426 // Implementation of RuntimeStub
427
// Constructs a RuntimeStub over the code in 'cb'.
// 'frame_complete' / 'frame_size' / 'oop_maps' describe the stub's frame for
// stack walking; 'caller_must_gc_arguments' is forwarded to RuntimeBlob.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, cb, sizeof(RuntimeStub), size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
440
441 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
442 CodeBuffer* cb,
443 int frame_complete,
444 int frame_size,
|