BufferBlob* BufferBlob::create(const char* name, int buffer_size) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  BufferBlob* blob = NULL;
  unsigned int size = sizeof(BufferBlob);
  // align the size to CodeEntryAlignment
  size = CodeBlob::align_code_offset(size);
  size += align_up(buffer_size, oopSize);
  assert(name != NULL, "must provide a name");
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) BufferBlob(name, size);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
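
// Worked example of the sizing above (the concrete numbers are illustrative
// assumptions, not guaranteed on any platform): with sizeof(BufferBlob) == 96,
// CodeEntryAlignment == 32 and oopSize == 8, a request for buffer_size == 100 becomes
//
//   size = CodeBlob::align_code_offset(96)   // header part, aligned for code entry
//        + align_up(100, 8);                 // payload rounded up to 104 bytes
//
// and that total is what the placement new below hands to the code cache allocator.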

BufferBlob::BufferBlob(const char* name, int header_size, int size, CodeBuffer* cb)
  : RuntimeBlob(name, cb, header_size, size, CodeOffsets::frame_never_safe, 0, NULL)
{}

BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  BufferBlob* blob = NULL;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
  assert(name != NULL, "must provide a name");
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) BufferBlob(name, sizeof(BufferBlob), size, cb);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
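
// The two create() overloads differ only in how the size is derived: the variant above
// rounds a caller-supplied buffer_size itself, while this one sizes the blob from an
// already-filled CodeBuffer via CodeBlob::allocation_size(), which folds in the
// buffer's content together with its relocation and embedded oop/metadata sections.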

void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}

void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
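
// Illustrative pairing (the caller, name and size here are made-up placeholders; only
// create(), content_begin() and free() are existing APIs):
//
//   BufferBlob* scratch = BufferBlob::create("scratch buffer", 4 * K);
//   if (scratch != NULL) {
//     ... emit temporary code starting at scratch->content_begin() ...
//     BufferBlob::free(scratch);
//   }
//
// The placement operator new above draws storage from the NonNMethod code heap, so
// pairing create() with free() (rather than using 'delete') is what returns it there.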

BufferBlob::BufferBlob(const char* name, int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
  : RuntimeBlob(name, cb, sizeof(BufferBlob), size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}


//----------------------------------------------------------------------------------------------------
// Implementation of AdapterBlob

AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
  BufferBlob("I2C/C2I adapters", size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  CodeCache::commit(this);
}

AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  CodeCache::gc_on_allocation();

  AdapterBlob* blob = NULL;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) AdapterBlob(size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
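
// Call-site sketch (hypothetical; the variable names and argument values are
// placeholders, not taken from the real adapter generator): the caller first emits the
// i2c/c2i entries into a CodeBuffer, then wraps them:
//
//   CodeBuffer buffer(buf);          // buf: a BufferBlob used as scratch space
//   MacroAssembler masm(&buffer);
//   ... generate the adapter entries ...
//   AdapterBlob* adapter =
//       AdapterBlob::create(&buffer, frame_complete, frame_size, oop_maps, false);
//
// Note that create() calls CodeCache::gc_on_allocation() before taking CodeCache_lock,
// giving the code cache a chance to reclaim space before the new blob is allocated.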

void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}

VtableBlob::VtableBlob(const char* name, int size) :
  BufferBlob(name, size) {
}

//----------------------------------------------------------------------------------------------------
// Implementation of MethodHandlesAdapterBlob

MethodHandlesAdapterBlob* MethodHandlesAdapterBlob::create(int buffer_size) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  MethodHandlesAdapterBlob* blob = NULL;
  unsigned int size = sizeof(MethodHandlesAdapterBlob);
  // align the size to CodeEntryAlignment
  size = CodeBlob::align_code_offset(size);
  size += align_up(buffer_size, oopSize);
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) MethodHandlesAdapterBlob(size);
    if (blob == NULL) {
      vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
    }
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
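
// Unlike BufferBlob::create() above, which can return NULL to its caller on code cache
// exhaustion, a failed allocation here is treated as fatal: the VM exits through
// vm_exit_out_of_memory(), presumably because method handle adapters are required for
// java.lang.invoke support and there is no useful way to continue without them.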

//----------------------------------------------------------------------------------------------------
// Implementation of BufferedInlineTypeBlob

BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
  BufferBlob("buffered inline type", sizeof(BufferedInlineTypeBlob), size, cb),
  _pack_fields_off(pack_fields_off),
  _pack_fields_jobject_off(pack_fields_jobject_off),
  _unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);
}

BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
  ThreadInVMfromUnknown __tiv;  // get to VM state in case we block on CodeCache_lock

  BufferedInlineTypeBlob* blob = NULL;
  unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
  {
    MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
    blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
  }
  // Track memory usage statistic after releasing CodeCache_lock
  MemoryService::track_code_cache_memory_usage();

  return blob;
}
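
// The three offsets recorded by this blob mark entry points inside the generated code:
// _pack_fields_off and _pack_fields_jobject_off for buffering an inline type's scalarized
// fields into a heap-allocated instance (the latter variant going through a jobject), and
// _unpack_fields_off for loading the fields back out. This reading is inferred from the
// field names; the actual calling conventions are defined by the stub generator, not here.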

//----------------------------------------------------------------------------------------------------
// Implementation of RuntimeStub

RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
  : RuntimeBlob(name, cb, sizeof(RuntimeStub), size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
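
// Unlike a plain BufferBlob, a RuntimeStub records frame_complete, frame_size and an
// OopMapSet, which is what lets the stack walker and the GC treat frames executing in
// the stub like ordinary compiled frames (the caller_must_gc_arguments flag further
// tells the GC how to handle outgoing arguments still owned by the caller).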

RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
                                           CodeBuffer* cb,
                                           int frame_complete,
                                           int frame_size,