
src/hotspot/share/memory/metaspace.cpp


 521       log_trace(gc, metaspace)("    shrinking:  initThreshold: %.1fK  maximum_desired_capacity: %.1fK",
 522                                (double) MetaspaceSize / (double) K, (double) maximum_desired_capacity / (double) K);
 523       log_trace(gc, metaspace)("    shrink_bytes: %.1fK  current_shrink_factor: %d  new shrink factor: %d  MinMetaspaceExpansion: %.1fK",
 524                                (double) shrink_bytes / (double) K, current_shrink_factor, _shrink_factor, (double) MinMetaspaceExpansion / (double) K);
 525     }
 526   }
 527 
 528   // Don't shrink unless it's significant
 529   if (shrink_bytes >= MinMetaspaceExpansion &&
 530       ((capacity_until_GC - shrink_bytes) >= MetaspaceSize)) {
 531     size_t new_capacity_until_GC = MetaspaceGC::dec_capacity_until_GC(shrink_bytes);
 532     Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
 533                                              new_capacity_until_GC,
 534                                              MetaspaceGCThresholdUpdater::ComputeNewSize);
 535   }
 536 }
 537 
 538 //////  Metaspace methods /////
 539 
 540 const MetaspaceTracer* Metaspace::_tracer = nullptr;
 541 
 542 bool Metaspace::initialized() {
 543   return metaspace::MetaspaceContext::context_nonclass() != nullptr
 544       LP64_ONLY(&& (using_class_space() ? Metaspace::class_space_is_initialized() : true));
 545 }
 546 
 547 #ifdef _LP64
 548 
 549 void Metaspace::print_compressed_class_space(outputStream* st) {
 550   if (VirtualSpaceList::vslist_class() != nullptr) {
 551     MetaWord* base = VirtualSpaceList::vslist_class()->base_of_first_node();
 552     size_t size = VirtualSpaceList::vslist_class()->word_size_of_first_node();
 553     MetaWord* top = base + size;
 554     st->print("Compressed class space mapped at: " PTR_FORMAT "-" PTR_FORMAT ", reserved size: " SIZE_FORMAT,
 555                p2i(base), p2i(top), (top - base) * BytesPerWord);
 556     st->cr();
 557   }
 558 }
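For reference, the line printed above renders roughly as follows (the addresses and size are made-up illustrative values, not taken from a real run):

    Compressed class space mapped at: 0x0000000800000000-0x0000000840000000, reserved size: 1073741824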
 559 
 560 // Given a prereserved space, use that to set up the compressed class space list.
 561 void Metaspace::initialize_class_space(ReservedSpace rs) {
 562   assert(rs.size() >= CompressedClassSpaceSize,
 563          SIZE_FORMAT " != " SIZE_FORMAT, rs.size(), CompressedClassSpaceSize);
 564   assert(using_class_space(), "Must be using class space");
 565 
 566   assert(rs.size() == CompressedClassSpaceSize, SIZE_FORMAT " != " SIZE_FORMAT,
 567          rs.size(), CompressedClassSpaceSize);
 568   assert(is_aligned(rs.base(), Metaspace::reserve_alignment()) &&
 569          is_aligned(rs.size(), Metaspace::reserve_alignment()),
 570          "wrong alignment");
 571 
 572   MetaspaceContext::initialize_class_space_context(rs);
 573 }
 574 
 575 // Returns true if class space has been set up (initialize_class_space).
 576 bool Metaspace::class_space_is_initialized() {
 577   return MetaspaceContext::context_class() != nullptr;
 578 }
 579 
 580 // Reserve a range of memory that is to contain narrow Klass IDs. If "optimize_for_zero_base"
 581 // is true, we will attempt to reserve memory suitable for zero-based encoding.
 582 ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size, bool optimize_for_zero_base) {
 583   char* result = nullptr;
 584 
 585   NOT_ZERO(result =
 586       (char*) CompressedKlassPointers::reserve_address_space_for_compressed_classes(size, RandomizeClassSpaceLocation,
 587                                                                                     optimize_for_zero_base));
 588 
 589   if (result == nullptr) {
 590     // Fallback: reserve anywhere
 591     log_debug(metaspace, map)("Trying anywhere...");
 592     result = os::reserve_memory_aligned(size, Metaspace::reserve_alignment(), false);
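The preference for low address ranges exists because, if the class space lands low enough, the narrow Klass encoding can use base 0 (and possibly shift 0), so decoding needs no addition. A minimal sketch of the decode arithmetic, using hypothetical names and values rather than the actual CompressedKlassPointers API:

    // Hypothetical illustration only; not the real CompressedKlassPointers code.
    static inline uintptr_t decode_narrow_klass(uint32_t narrow, uintptr_t base, int shift) {
      return base + ((uintptr_t)narrow << shift);  // zero-based encoding: base == 0, so this is just a shift
    }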

 630   //  commit charge. It defaults to max_uintx (unlimited).
 631   //
 632   // CompressedClassSpaceSize is the size, in bytes, of the address range we
 633   //  pre-reserve for the compressed class space (if we use class space).
 634   //  This size has to be aligned to the metaspace reserve alignment (to the
 635   //  size of a root chunk). It gets aligned up from whatever value the caller
 636   //  gave us to the next multiple of root chunk size.
 637   //
 638   // Note: Strictly speaking MaxMetaspaceSize and CompressedClassSpaceSize have
 639   //  very little to do with each other. The notion often encountered:
 640   //  MaxMetaspaceSize = CompressedClassSpaceSize + <non-class metadata size>
 641   //  is subtly wrong: MaxMetaspaceSize can be smaller than CompressedClassSpaceSize,
 642   //  in which case we just would not be able to fully commit the class space range.
 643   //
 644   // We still adjust CompressedClassSpaceSize to reasonable limits, mainly to
 645   //  save on reserved space, and to make ergonomics less confusing.
 646 
 647   MaxMetaspaceSize = MAX2(MaxMetaspaceSize, commit_alignment());
 648 
 649   if (UseCompressedClassPointers) {
 650     // Let CCS size not be larger than 80% of MaxMetaspaceSize. Note that this is
 651     // grossly over-dimensioned for most usage scenarios; typical ratio of
 652     // class space : non class space usage is about 1:6. With many small classes,
 653     // it can get as low as 1:2. It is not a big deal though since ccs is only
 654     // reserved and will be committed on demand only.
 655     size_t max_ccs_size = 8 * (MaxMetaspaceSize / 10);
 656     size_t adjusted_ccs_size = MIN2(CompressedClassSpaceSize, max_ccs_size);
 657 
 658     // CCS must be aligned to root chunk size, and be at least the size of one
 659     //  root chunk.
 660     adjusted_ccs_size = align_up(adjusted_ccs_size, reserve_alignment());
 661     adjusted_ccs_size = MAX2(adjusted_ccs_size, reserve_alignment());
 662 
 663     // Note: re-adjusting may leave us with a CompressedClassSpaceSize
 664     //  larger than MaxMetaspaceSize for very small values of MaxMetaspaceSize.
 665     //  Let's just live with that, it's not a big deal.
 666 
 667     if (adjusted_ccs_size != CompressedClassSpaceSize) {
 668       FLAG_SET_ERGO(CompressedClassSpaceSize, adjusted_ccs_size);
 669       log_info(metaspace)("Setting CompressedClassSpaceSize to " SIZE_FORMAT ".",
 670                           CompressedClassSpaceSize);
 671     }
 672   }
 673 
 674   // Set MetaspaceSize, MinMetaspaceExpansion and MaxMetaspaceExpansion
 675   if (MetaspaceSize > MaxMetaspaceSize) {
 676     MetaspaceSize = MaxMetaspaceSize;
 677   }
 678 
 679   MetaspaceSize = align_down_bounded(MetaspaceSize, commit_alignment());
 680 
 681   assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");
 682 
 683   MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, commit_alignment());
 684   MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, commit_alignment());
 685 
 686 }
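To make the sizing above concrete, a hypothetical example (flag values chosen purely for illustration; CompressedClassSpaceSize is assumed at its 1G default):

    // -XX:MaxMetaspaceSize=256m, CompressedClassSpaceSize left at 1G:
    //   max_ccs_size      = 8 * (256M / 10)        = 214748360 bytes (~204.8 MB)
    //   adjusted_ccs_size = MIN2(1G, max_ccs_size) = 214748360 bytes
    // which is then aligned *up* to the next root chunk boundary and bounded below
    // by one root chunk before being written back into CompressedClassSpaceSize.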

 757     // ...failing that, reserve anywhere, but let platform do optimized placement:
 758     if (!rs.is_reserved()) {
 759       log_info(metaspace)("Reserving compressed class space anywhere");
 760       rs = Metaspace::reserve_address_space_for_compressed_classes(size, true);
 761     }
 762 
 763     // ...failing that, give up.
 764     if (!rs.is_reserved()) {
 765       vm_exit_during_initialization(
 766           err_msg("Could not allocate compressed class space: " SIZE_FORMAT " bytes",
 767                    CompressedClassSpaceSize));
 768     }
 769 
 770     // Mark class space as such
 771     MemTracker::record_virtual_memory_type((address)rs.base(), mtClass);
 772 
 773     // Initialize space
 774     Metaspace::initialize_class_space(rs);
 775 
 776     // Set up compressed class pointer encoding.

 777     CompressedKlassPointers::initialize((address)rs.base(), rs.size());
 778   }
 779 
 780 #endif
 781 
 782   // Initialize non-class virtual space list, and its chunk manager:
 783   MetaspaceContext::initialize_nonclass_space_context();
 784 
 785   _tracer = new MetaspaceTracer();
 786 
 787   // We must prevent the very first address of the ccs from being used to store
 788   // metadata, since that address would translate to a narrow pointer of 0, and the
 789   // VM does not distinguish between "narrow 0 as in null" and "narrow 0 as in start
 790   //  of ccs".
 791   // Before Elastic Metaspace this did not happen, because every Metachunk
 792   // had a header and therefore could not allocate anything at offset 0.
 793 #ifdef _LP64
 794   if (using_class_space()) {
 795     // The simplest way to fix this is to allocate a tiny dummy chunk right at the
 796     // start of ccs and do not use it for anything.
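The problem can be read straight off the encoding arithmetic (a sketch, not the actual CompressedKlassPointers code):

    // A Klass* placed exactly at the ccs base would encode to 0:
    //   narrow = (uint32_t)(((uintptr_t)k - base) >> shift);  // k == base  =>  narrow == 0
    // and 0 is already taken to mean "null narrow Klass pointer", hence the dummy chunk
    // that permanently occupies offset 0 of the class space.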

 825 // This version of Metaspace::allocate does not throw OOM but simply returns null, and
 826 // is suitable for calling from non-Java threads.
 827 // Callers are responsible for checking null.
 828 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 829                               MetaspaceObj::Type type) {
 830   assert(word_size <= Metaspace::max_allocation_word_size(),
 831          "allocation size too large (" SIZE_FORMAT ")", word_size);
 832 
 833   assert(loader_data != nullptr, "Should never pass around a null loader_data. "
 834         "ClassLoaderData::the_null_class_loader_data() should have been used.");
 835 
 836   // Deal with concurrent unloading failed allocation starvation
 837   MetaspaceCriticalAllocation::block_if_concurrent_purge();
 838 
 839   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
 840 
 841   // Try to allocate metadata.
 842   MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
 843 
 844   if (result != nullptr) {
 845     // Zero initialize.
 846     Copy::fill_to_words((HeapWord*)result, word_size, 0);
 847 
 848     log_trace(metaspace)("Metaspace::allocate: type %d return " PTR_FORMAT ".", (int)type, p2i(result));
 849   }
 850 
 851   return result;
 852 }
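As the comment above notes, this overload reports failure by returning null; a hedged usage sketch (loader_data and word_size are hypothetical caller-side variables, not taken from this file):

    // Hypothetical non-Java-thread caller: no TRAPS, so the null check is mandatory.
    MetaWord* p = Metaspace::allocate(loader_data, word_size, MetaspaceObj::ClassType);
    if (p == nullptr) {
      // Out of metaspace; back off and report the failure through another channel.
      return false;
    }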
 853 
 854 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 855                               MetaspaceObj::Type type, TRAPS) {
 856 
 857   if (HAS_PENDING_EXCEPTION) {
 858     assert(false, "Should not allocate with exception pending");
 859     return nullptr;  // caller does a CHECK_NULL too
 860   }
 861   assert(!THREAD->owns_locks(), "allocating metaspace while holding mutex");
 862 
 863   MetaWord* result = allocate(loader_data, word_size, type);
 864 
 865   if (result == nullptr) {
 866     MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
 867     tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);

 960     ChunkManager* cm = ChunkManager::chunkmanager_nonclass();
 961     if (cm != nullptr) {
 962       cm->purge();
 963     }
 964     if (using_class_space()) {
 965       cm = ChunkManager::chunkmanager_class();
 966       if (cm != nullptr) {
 967         cm->purge();
 968       }
 969     }
 970   }
 971 
 972   // Try to satisfy queued metaspace allocation requests.
 973   //
 974   // It might seem unnecessary to try to process allocation requests if no
 975   // classes have been unloaded. However, this call is required for the code
 976   // in MetaspaceCriticalAllocation::try_allocate_critical to work.
 977   MetaspaceCriticalAllocation::process();
 978 }
 979 
 980 bool Metaspace::contains(const void* ptr) {
 981   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
 982     return true;
 983   }
 984   return contains_non_shared(ptr);
 985 }
 986 
 987 bool Metaspace::contains_non_shared(const void* ptr) {
 988   if (using_class_space() && VirtualSpaceList::vslist_class()->contains((MetaWord*)ptr)) {
 989      return true;
 990   }
 991 
 992   return VirtualSpaceList::vslist_nonclass()->contains((MetaWord*)ptr);
 993 }

 521       log_trace(gc, metaspace)("    shrinking:  initThreshold: %.1fK  maximum_desired_capacity: %.1fK",
 522                                (double) MetaspaceSize / (double) K, (double) maximum_desired_capacity / (double) K);
 523       log_trace(gc, metaspace)("    shrink_bytes: %.1fK  current_shrink_factor: %d  new shrink factor: %d  MinMetaspaceExpansion: %.1fK",
 524                                (double) shrink_bytes / (double) K, current_shrink_factor, _shrink_factor, (double) MinMetaspaceExpansion / (double) K);
 525     }
 526   }
 527 
 528   // Don't shrink unless it's significant
 529   if (shrink_bytes >= MinMetaspaceExpansion &&
 530       ((capacity_until_GC - shrink_bytes) >= MetaspaceSize)) {
 531     size_t new_capacity_until_GC = MetaspaceGC::dec_capacity_until_GC(shrink_bytes);
 532     Metaspace::tracer()->report_gc_threshold(capacity_until_GC,
 533                                              new_capacity_until_GC,
 534                                              MetaspaceGCThresholdUpdater::ComputeNewSize);
 535   }
 536 }
 537 
 538 //////  Metaspace methods /////
 539 
 540 const MetaspaceTracer* Metaspace::_tracer = nullptr;
 541 const void* Metaspace::_class_space_start = nullptr;
 542 const void* Metaspace::_class_space_end = nullptr;
 543 
 544 bool Metaspace::initialized() {
 545   return metaspace::MetaspaceContext::context_nonclass() != nullptr
 546       LP64_ONLY(&& (using_class_space() ? Metaspace::class_space_is_initialized() : true));
 547 }
 548 
 549 #ifdef _LP64
 550 
 551 void Metaspace::print_compressed_class_space(outputStream* st) {
 552   if (VirtualSpaceList::vslist_class() != nullptr) {
 553     MetaWord* base = VirtualSpaceList::vslist_class()->base_of_first_node();
 554     size_t size = VirtualSpaceList::vslist_class()->word_size_of_first_node();
 555     MetaWord* top = base + size;
 556     st->print("Compressed class space mapped at: " PTR_FORMAT "-" PTR_FORMAT ", reserved size: " SIZE_FORMAT,
 557                p2i(base), p2i(top), (top - base) * BytesPerWord);
 558     st->cr();
 559   }
 560 }
 561 
 562 // Given a prereserved space, use that to set up the compressed class space list.
 563 void Metaspace::initialize_class_space(ReservedSpace rs) {
 564   assert(rs.size() >= CompressedClassSpaceSize,
 565          SIZE_FORMAT " != " SIZE_FORMAT, rs.size(), CompressedClassSpaceSize);
 566   assert(using_class_space(), "Must be using class space");
 567 
 568   assert(rs.size() == CompressedClassSpaceSize, SIZE_FORMAT " != " SIZE_FORMAT,
 569          rs.size(), CompressedClassSpaceSize);
 570   assert(is_aligned(rs.base(), Metaspace::reserve_alignment()) &&
 571          is_aligned(rs.size(), Metaspace::reserve_alignment()),
 572          "wrong alignment");
 573 
 574   MetaspaceContext::initialize_class_space_context(rs);
 575   _class_space_start = rs.base();
 576   _class_space_end = rs.end();
 577 }
 578 
 579 // Returns true if class space has been set up (initialize_class_space).
 580 bool Metaspace::class_space_is_initialized() {
 581   return MetaspaceContext::context_class() != nullptr;
 582 }
 583 
 584 // Reserve a range of memory that is to contain narrow Klass IDs. If "optimize_for_zero_base"
 585 // is true, we will attempt to reserve memory suitable for zero-based encoding.
 586 ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size, bool optimize_for_zero_base) {
 587   char* result = nullptr;
 588 
 589   NOT_ZERO(result =
 590       (char*) CompressedKlassPointers::reserve_address_space_for_compressed_classes(size, RandomizeClassSpaceLocation,
 591                                                                                     optimize_for_zero_base));
 592 
 593   if (result == nullptr) {
 594     // Fallback: reserve anywhere
 595     log_debug(metaspace, map)("Trying anywhere...");
 596     result = os::reserve_memory_aligned(size, Metaspace::reserve_alignment(), false);

 634   //  commit charge. It defaults to max_uintx (unlimited).
 635   //
 636   // CompressedClassSpaceSize is the size, in bytes, of the address range we
 637   //  pre-reserve for the compressed class space (if we use class space).
 638   //  This size has to be aligned to the metaspace reserve alignment (to the
 639   //  size of a root chunk). It gets aligned up from whatever value the caller
 640   //  gave us to the next multiple of root chunk size.
 641   //
 642   // Note: Strictly speaking MaxMetaspaceSize and CompressedClassSpaceSize have
 643   //  very little to do with each other. The notion often encountered:
 644   //  MaxMetaspaceSize = CompressedClassSpaceSize + <non-class metadata size>
 645   //  is subtly wrong: MaxMetaspaceSize can be smaller than CompressedClassSpaceSize,
 646   //  in which case we just would not be able to fully commit the class space range.
 647   //
 648   // We still adjust CompressedClassSpaceSize to reasonable limits, mainly to
 649   //  save on reserved space, and to make ergonomics less confusing.
 650 
 651   MaxMetaspaceSize = MAX2(MaxMetaspaceSize, commit_alignment());
 652 
 653   if (UseCompressedClassPointers) {
 654     // Adjust size of the compressed class space.
 655 
 656     const size_t res_align = reserve_alignment();
 657 
 658     // Let CCS size not be larger than 80% of MaxMetaspaceSize. Note that this is
 659     // grossly over-dimensioned for most usage scenarios; typical ratio of
 660     // class space : non class space usage is about 1:6. With many small classes,
 661     // it can get as low as 1:2. It is not a big deal though since ccs is only
 662     // reserved and will be committed on demand only.
 663     const size_t max_ccs_size = 8 * (MaxMetaspaceSize / 10);
 664 
 665     // CCS is also limited by the max. possible Klass encoding range size
 666     const size_t max_encoding_range = CompressedKlassPointers::max_encoding_range_size();
 667     assert(max_encoding_range >= res_align,
 668            "Encoding range (%zu) must cover at least a full root chunk (%zu)",
 669            max_encoding_range, res_align);
 670 
 671     size_t adjusted_ccs_size = MIN3(CompressedClassSpaceSize, max_ccs_size, max_encoding_range);
 672 
 673     // CCS must be aligned to root chunk size, and be at least the size of one
 674     //  root chunk. But impose a minimum size of 1 root chunk (16MB).
 675     adjusted_ccs_size = MAX2(align_down(adjusted_ccs_size, res_align), res_align);
 676 
 677     // Print a warning if the adjusted size differs from the user's input
 678     if (CompressedClassSpaceSize != adjusted_ccs_size) {
 679       #define X "CompressedClassSpaceSize adjusted from user input " \
 680                 "%zu bytes to %zu bytes", CompressedClassSpaceSize, adjusted_ccs_size
 681       if (FLAG_IS_CMDLINE(CompressedClassSpaceSize)) {
 682         log_warning(metaspace)(X);
 683       } else {
 684         log_info(metaspace)(X);
 685       }
 686       #undef X
 687     }
 688 
 689     // Note: re-adjusting may leave us with a CompressedClassSpaceSize
 690     //  larger than MaxMetaspaceSize for very small values of MaxMetaspaceSize.
 691     //  Let's just live with that, it's not a big deal.

 692     if (adjusted_ccs_size != CompressedClassSpaceSize) {
 693       FLAG_SET_ERGO(CompressedClassSpaceSize, adjusted_ccs_size);
 694       log_info(metaspace)("Setting CompressedClassSpaceSize to " SIZE_FORMAT ".",
 695                           CompressedClassSpaceSize);
 696     }
 697   }
 698 
 699   // Set MetaspaceSize, MinMetaspaceExpansion and MaxMetaspaceExpansion
 700   if (MetaspaceSize > MaxMetaspaceSize) {
 701     MetaspaceSize = MaxMetaspaceSize;
 702   }
 703 
 704   MetaspaceSize = align_down_bounded(MetaspaceSize, commit_alignment());
 705 
 706   assert(MetaspaceSize <= MaxMetaspaceSize, "MetaspaceSize should be limited by MaxMetaspaceSize");
 707 
 708   MinMetaspaceExpansion = align_down_bounded(MinMetaspaceExpansion, commit_alignment());
 709   MaxMetaspaceExpansion = align_down_bounded(MaxMetaspaceExpansion, commit_alignment());
 710 
 711 }
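Running the same hypothetical inputs through the revised logic above (the 4G encoding-range limit and the 16MB root chunk size are assumptions, the latter taken from the comment in the code; neither changes the MIN3 result here):

    // -XX:MaxMetaspaceSize=256m, CompressedClassSpaceSize left at its 1G default,
    // assuming max_encoding_range = 4G and reserve_alignment() = 16M:
    //   max_ccs_size      = 8 * (256M / 10)            = 214748360 bytes (~204.8 MB)
    //   adjusted_ccs_size = MIN3(1G, max_ccs_size, 4G) = 214748360 bytes
    //   adjusted_ccs_size = MAX2(align_down(214748360, 16M), 16M)
    //                     = 201326592 bytes (192 MB)
    // i.e. the size is now rounded down to a root chunk boundary rather than up.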

 782     // ...failing that, reserve anywhere, but let platform do optimized placement:
 783     if (!rs.is_reserved()) {
 784       log_info(metaspace)("Reserving compressed class space anywhere");
 785       rs = Metaspace::reserve_address_space_for_compressed_classes(size, true);
 786     }
 787 
 788     // ...failing that, give up.
 789     if (!rs.is_reserved()) {
 790       vm_exit_during_initialization(
 791           err_msg("Could not allocate compressed class space: " SIZE_FORMAT " bytes",
 792                    CompressedClassSpaceSize));
 793     }
 794 
 795     // Mark class space as such
 796     MemTracker::record_virtual_memory_type((address)rs.base(), mtClass);
 797 
 798     // Initialize space
 799     Metaspace::initialize_class_space(rs);
 800 
 801     // Set up compressed class pointer encoding.
 802     // In CDS=off mode, we give the JVM some leeway to choose a favorable base/shift combination.
 803     CompressedKlassPointers::initialize((address)rs.base(), rs.size());
 804   }
 805 
 806 #endif
 807 
 808   // Initialize non-class virtual space list, and its chunk manager:
 809   MetaspaceContext::initialize_nonclass_space_context();
 810 
 811   _tracer = new MetaspaceTracer();
 812 
 813   // We must prevent the very first address of the ccs from being used to store
 814   // metadata, since that address would translate to a narrow pointer of 0, and the
 815   // VM does not distinguish between "narrow 0 as in null" and "narrow 0 as in start
 816   //  of ccs".
 817   // Before Elastic Metaspace this did not happen, because every Metachunk
 818   // had a header and therefore could not allocate anything at offset 0.
 819 #ifdef _LP64
 820   if (using_class_space()) {
 821     // The simplest way to fix this is to allocate a tiny dummy chunk right at the
 822     // start of ccs and do not use it for anything.

 851 // This version of Metaspace::allocate does not throw OOM but simply returns null, and
 852 // is suitable for calling from non-Java threads.
 853 // Callers are responsible for checking null.
 854 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 855                               MetaspaceObj::Type type) {
 856   assert(word_size <= Metaspace::max_allocation_word_size(),
 857          "allocation size too large (" SIZE_FORMAT ")", word_size);
 858 
 859   assert(loader_data != nullptr, "Should never pass around a null loader_data. "
 860         "ClassLoaderData::the_null_class_loader_data() should have been used.");
 861 
 862   // Deal with concurrent unloading failed allocation starvation
 863   MetaspaceCriticalAllocation::block_if_concurrent_purge();
 864 
 865   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
 866 
 867   // Try to allocate metadata.
 868   MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
 869 
 870   if (result != nullptr) {
 871 #ifdef ASSERT
 872     if (using_class_space() && mdtype == ClassType) {
 873       assert(is_in_class_space(result) &&
 874              is_aligned(result, CompressedKlassPointers::klass_alignment_in_bytes()), "Sanity");
 875     } else {
 876       assert((is_in_class_space(result) || is_in_nonclass_metaspace(result)) &&
 877              is_aligned(result, Metaspace::min_allocation_alignment_bytes), "Sanity");
 878     }
 879 #endif
 880     // Zero initialize.
 881     Copy::fill_to_words((HeapWord*)result, word_size, 0);

 882     log_trace(metaspace)("Metaspace::allocate: type %d return " PTR_FORMAT ".", (int)type, p2i(result));
 883   }
 884 
 885   return result;
 886 }
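The new assert block above ties allocation alignment to the narrow Klass encoding: a class-space allocation must sit at a multiple of the klass alignment so that shifting away the low bits loses no information. A sketch with hypothetical names (not the actual encoding code):

    // With shift s, only offsets that are multiples of (1 << s) round-trip exactly:
    //   narrow = (uint32_t)((addr - base) >> s);
    //   addr'  = base + ((uintptr_t)narrow << s);   // addr' == addr only if (addr - base) % (1 << s) == 0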
 887 
 888 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
 889                               MetaspaceObj::Type type, TRAPS) {
 890 
 891   if (HAS_PENDING_EXCEPTION) {
 892     assert(false, "Should not allocate with exception pending");
 893     return nullptr;  // caller does a CHECK_NULL too
 894   }
 895   assert(!THREAD->owns_locks(), "allocating metaspace while holding mutex");
 896 
 897   MetaWord* result = allocate(loader_data, word_size, type);
 898 
 899   if (result == nullptr) {
 900     MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
 901     tracer()->report_metaspace_allocation_failure(loader_data, word_size, type, mdtype);

 994     ChunkManager* cm = ChunkManager::chunkmanager_nonclass();
 995     if (cm != nullptr) {
 996       cm->purge();
 997     }
 998     if (using_class_space()) {
 999       cm = ChunkManager::chunkmanager_class();
1000       if (cm != nullptr) {
1001         cm->purge();
1002       }
1003     }
1004   }
1005 
1006   // Try to satisfy queued metaspace allocation requests.
1007   //
1008   // It might seem unnecessary to try to process allocation requests if no
1009   // classes have been unloaded. However, this call is required for the code
1010   // in MetaspaceCriticalAllocation::try_allocate_critical to work.
1011   MetaspaceCriticalAllocation::process();
1012 }
1013 
 1014 // Returns true if the pointer points into the shared (CDS) metaspace, i.e. into
 1015 // class metadata mapped in from the CDS archive.
1016 bool Metaspace::is_in_shared_metaspace(const void* ptr) {
1017   return MetaspaceShared::is_in_shared_metaspace(ptr);
1018 }
1019 
1020 // Returns true if pointer points into one of the non-class-space metaspace regions.
1021 bool Metaspace::is_in_nonclass_metaspace(const void* ptr) {
1022   return VirtualSpaceList::vslist_nonclass()->contains((MetaWord*)ptr);
1023 }
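Taken together, the predicates above let a caller classify an arbitrary pointer. A small hedged sketch (the helper itself is made up; it also assumes Metaspace::is_in_class_space(), referenced in the allocate() assert above, is accessible to the caller):

    static const char* classify_metaspace_ptr(const void* p) {
      if (Metaspace::is_in_shared_metaspace(p))   return "CDS shared metaspace";
      if (Metaspace::is_in_class_space(p))        return "compressed class space";
      if (Metaspace::is_in_nonclass_metaspace(p)) return "non-class metaspace";
      return "not in metaspace";
    }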