src/hotspot/share/memory/metaspace/metaspaceArena.cpp (old version)


 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "logging/log.hpp"
 28 #include "logging/logStream.hpp"
 29 #include "memory/metaspace/chunkManager.hpp"
 30 #include "memory/metaspace/counters.hpp"
 31 #include "memory/metaspace/freeBlocks.hpp"
 32 #include "memory/metaspace/internalStats.hpp"
 33 #include "memory/metaspace/metachunk.hpp"
 34 #include "memory/metaspace/metaspaceArena.hpp"
 35 #include "memory/metaspace/metaspaceArenaGrowthPolicy.hpp"
 36 #include "memory/metaspace/metaspaceCommon.hpp"
 37 #include "memory/metaspace/metaspaceSettings.hpp"
 38 #include "memory/metaspace/metaspaceStatistics.hpp"
 39 #include "memory/metaspace/virtualSpaceList.hpp"
 40 #include "runtime/atomic.hpp"
 41 #include "runtime/init.hpp"
 42 #include "runtime/mutexLocker.hpp"
 43 #include "services/memoryService.hpp"
 44 #include "utilities/align.hpp"
 45 #include "utilities/debug.hpp"
 46 #include "utilities/globalDefinitions.hpp"
 47 
 48 namespace metaspace {
 49 
 50 #define LOGFMT         "Arena @" PTR_FORMAT " (%s)"
 51 #define LOGFMT_ARGS    p2i(this), this->_name
 52 
 53 // Returns the level of the next chunk to be added, according to the growth policy.
 54 chunklevel_t MetaspaceArena::next_chunk_level() const {
 55   const int growth_step = _chunks.count();
 56   return _growth_policy->get_level_at_step(growth_step);
 57 }
 58 
 59 // Given a chunk, add its remaining free committed space to the free block list.
 60 void MetaspaceArena::salvage_chunk(Metachunk* c) {
 61   size_t remaining_words = c->free_below_committed_words();
 62   if (remaining_words >= FreeBlocks::MinWordSize) {
 63 
 64     UL2(trace, "salvaging chunk " METACHUNK_FULL_FORMAT ".", METACHUNK_FULL_FORMAT_ARGS(c));
 65 
 66     MetaWord* ptr = c->allocate(remaining_words);
 67     assert(ptr != nullptr, "Should have worked");
 68     _total_used_words_counter->increment_by(remaining_words);
 69 
 70     add_allocation_to_fbl(ptr, remaining_words);
 71 
 72     // After this operation: the chunk should have no free committed space left.
 73     assert(c->free_below_committed_words() == 0,
 74            "Salvaging chunk failed (chunk " METACHUNK_FULL_FORMAT ").",
 75            METACHUNK_FULL_FORMAT_ARGS(c));
 76   }
 77 }
 78 
 79 // Allocate a new chunk from the underlying chunk manager able to hold at least
 80 // requested word size.
 81 Metachunk* MetaspaceArena::allocate_new_chunk(size_t requested_word_size) {
 82   // Should this ever happen, we need to increase the maximum possible chunk size.
 83   guarantee(requested_word_size <= chunklevel::MAX_CHUNK_WORD_SIZE,
 84             "Requested size too large (" SIZE_FORMAT ") - max allowed size per allocation is " SIZE_FORMAT ".",
 85             requested_word_size, chunklevel::MAX_CHUNK_WORD_SIZE);
 86 
 87   const chunklevel_t max_level = chunklevel::level_fitting_word_size(requested_word_size);
 88   const chunklevel_t preferred_level = MIN2(max_level, next_chunk_level());
 89 
 90   Metachunk* c = _chunk_manager->get_chunk(preferred_level, max_level, requested_word_size);
 91   if (c == nullptr) {
 92     return nullptr;
 93   }
 94 
 95   assert(c->is_in_use(), "Wrong chunk state.");
 96   assert(c->free_below_committed_words() >= requested_word_size, "Chunk not committed");
 97   return c;
 98 }
 99 
100 void MetaspaceArena::add_allocation_to_fbl(MetaWord* p, size_t word_size) {
101   assert(p != nullptr, "p is null");
102   assert_is_aligned_metaspace_pointer(p);
103   assert(word_size > 0, "zero sized");
104 
105   if (_fbl == nullptr) {
106     _fbl = new FreeBlocks(); // Create only on demand
107   }
108   _fbl->add_block(p, word_size);
109 }
110 
111 MetaspaceArena::MetaspaceArena(ChunkManager* chunk_manager, const ArenaGrowthPolicy* growth_policy,
112                                SizeAtomicCounter* total_used_words_counter,
113                                const char* name) :
114   _chunk_manager(chunk_manager),
115   _growth_policy(growth_policy),
116   _chunks(),
117   _fbl(nullptr),
118   _total_used_words_counter(total_used_words_counter),
119   _name(name)
120 #ifdef ASSERT
121   , _first_fence(nullptr)
122 #endif
123 {
124   UL(debug, ": born.");
125 
126   // Update statistics
127   InternalStats::inc_num_arena_births();
128 }
129 
130 MetaspaceArena::~MetaspaceArena() {
131 #ifdef ASSERT
132   SOMETIMES(verify();)
133   if (Settings::use_allocation_guard()) {
134     verify_allocation_guards();
135   }
136 #endif
137   MemRangeCounter return_counter;
138 
139   Metachunk* c = _chunks.first();
140   Metachunk* c2 = nullptr;
141 
142   while (c) {
143     c2 = c->next();
144     return_counter.add(c->used_words());
145     DEBUG_ONLY(c->set_prev(nullptr);)
146     DEBUG_ONLY(c->set_next(nullptr);)
147     UL2(debug, "return chunk: " METACHUNK_FORMAT ".", METACHUNK_FORMAT_ARGS(c));
148     _chunk_manager->return_chunk(c);
149     // c may be invalid after return_chunk(c) was called. Don't access anymore.
150     c = c2;
151   }
152 
153   UL2(info, "returned %d chunks, total capacity " SIZE_FORMAT " words.",
154       return_counter.count(), return_counter.total_size());
155 
156   _total_used_words_counter->decrement_by(return_counter.total_size());
157   SOMETIMES(chunk_manager()->verify();)
158   delete _fbl;
159   UL(debug, ": dies.");
160 
161   // Update statistics
162   InternalStats::inc_num_arena_deaths();
163 }
164 
165 // Attempt to enlarge the current chunk to make it large enough to hold at least
166 //  requested_word_size additional words.
167 //
 168 // Returns true on success, false otherwise.
169 bool MetaspaceArena::attempt_enlarge_current_chunk(size_t requested_word_size) {
170   Metachunk* c = current_chunk();
171   assert(c->free_words() < requested_word_size, "Sanity");
172 
173   // Not if chunk enlargement is switched off...

198   if (!c->is_leader()) {
199     return false;
200   }
 201   // If the size added to the chunk would be larger than allowed for the next growth step,
 202   // don't enlarge.
203   if (next_chunk_level() > c->level()) {
204     return false;
205   }
206 
207   bool success = _chunk_manager->attempt_enlarge_chunk(c);
208   assert(success == false || c->free_words() >= requested_word_size, "Sanity");
209   return success;
210 }
211 
212 // Allocate memory from Metaspace.
213 // 1) Attempt to allocate from the free block list.
214 // 2) Attempt to allocate from the current chunk.
215 // 3) Attempt to enlarge the current chunk in place if it is too small.
216 // 4) Attempt to get a new chunk and allocate from that chunk.
217 // At any point, if we hit a commit limit, we return null.
218 MetaWord* MetaspaceArena::allocate(size_t requested_word_size) {
219   UL2(trace, "requested " SIZE_FORMAT " words.", requested_word_size);
220 
221   MetaWord* p = nullptr;
222   const size_t aligned_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);
223 
224   // Before bothering the arena proper, attempt to re-use a block from the free blocks list
225   if (_fbl != nullptr && !_fbl->is_empty()) {
226     p = _fbl->remove_block(aligned_word_size);
227     if (p != nullptr) {
228       DEBUG_ONLY(InternalStats::inc_num_allocs_from_deallocated_blocks();)
229       UL2(trace, "returning " PTR_FORMAT " - taken from fbl (now: %d, " SIZE_FORMAT ").",
230           p2i(p), _fbl->count(), _fbl->total_size());
231       assert_is_aligned_metaspace_pointer(p);
232       // Note: free blocks in freeblock dictionary still count as "used" as far as statistics go;
233       // therefore we have no need to adjust any usage counters (see epilogue of allocate_inner())
234       // and can just return here.
235       return p;
236     }
237   }
238 
239   // Primary allocation
240   p = allocate_inner(aligned_word_size);
241 
242 #ifdef ASSERT
243   // Fence allocation
244   if (p != nullptr && Settings::use_allocation_guard()) {
245     STATIC_ASSERT(is_aligned(sizeof(Fence), BytesPerWord));
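         // The fence is a small block carrying eye-catcher words; it is chained into _first_fence
         // so that verify_allocation_guards() can later detect overwrites.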
246     MetaWord* guard = allocate_inner(sizeof(Fence) / BytesPerWord);
247     if (guard != nullptr) {
 248       // Ignore allocation errors for the fence to keep the code simple. If this
 249       // happens (e.g. because we hit the Metaspace GC threshold right at this point),
 250       // we just miss adding this one fence. Not a big deal, and it would be pretty
 251       // rare anyway - chances are much higher that the primary allocation above
 252       // would have already failed.
253       Fence* f = new(guard) Fence(_first_fence);
254       _first_fence = f;
255     }
256   }
257 #endif // ASSERT
258 
259   return p;
260 }
261 
262 // Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
263 MetaWord* MetaspaceArena::allocate_inner(size_t word_size) {
264   assert_is_aligned(word_size, metaspace::AllocationAlignmentWordSize);
265 
266   MetaWord* p = nullptr;
267   bool current_chunk_too_small = false;
268   bool commit_failure = false;
269 
270   if (current_chunk() != nullptr) {
271 
272     // Attempt to satisfy the allocation from the current chunk.
273 
274     // If the current chunk is too small to hold the requested size, attempt to enlarge it.
275     // If that fails, retire the chunk.
276     if (current_chunk()->free_words() < word_size) {
277       if (!attempt_enlarge_current_chunk(word_size)) {
278         current_chunk_too_small = true;
279       } else {
280         DEBUG_ONLY(InternalStats::inc_num_chunks_enlarged();)
281         UL(debug, "enlarged chunk.");
282       }
283     }
284 
285     // Commit the chunk far enough to hold the requested word size. If that fails, we
286     // hit a limit (either GC threshold or MaxMetaspaceSize). In that case retire the
287     // chunk.
288     if (!current_chunk_too_small) {
289       if (!current_chunk()->ensure_committed_additional(word_size)) {
290         UL2(info, "commit failure (requested size: " SIZE_FORMAT ")", word_size);
291         commit_failure = true;
292       }
293     }
294 
295     // Allocate from the current chunk. This should work now.
296     if (!current_chunk_too_small && !commit_failure) {
297       p = current_chunk()->allocate(word_size);
298       assert(p != nullptr, "Allocation from chunk failed.");
299     }
300   }
301 
302   if (p == nullptr) {
303     // If we are here, we either had no current chunk to begin with or it was deemed insufficient.
304     assert(current_chunk() == nullptr ||
305            current_chunk_too_small || commit_failure, "Sanity");
306 
307     Metachunk* new_chunk = allocate_new_chunk(word_size);
308     if (new_chunk != nullptr) {
309       UL2(debug, "allocated new chunk " METACHUNK_FORMAT " for requested word size " SIZE_FORMAT ".",
310           METACHUNK_FORMAT_ARGS(new_chunk), word_size);
311 
312       assert(new_chunk->free_below_committed_words() >= word_size, "Sanity");
313 
314       // We have a new chunk. Before making it the current chunk, retire the old one.
315       if (current_chunk() != nullptr) {
316         salvage_chunk(current_chunk());
317         DEBUG_ONLY(InternalStats::inc_num_chunks_retired();)
318       }
319 
320       _chunks.add(new_chunk);
321 
322       // Now, allocate from that chunk. That should work.
323       p = current_chunk()->allocate(word_size);
324       assert(p != nullptr, "Allocation from chunk failed.");
325     } else {
326       UL2(info, "failed to allocate new chunk for requested word size " SIZE_FORMAT ".", word_size);
327     }
328   }
329 
330   if (p == nullptr) {
331     InternalStats::inc_num_allocs_failed_limit();
332   } else {
333     DEBUG_ONLY(InternalStats::inc_num_allocs();)
334     _total_used_words_counter->increment_by(word_size);
335   }
336 
337   SOMETIMES(verify();)
338 
339   if (p == nullptr) {
340     UL(info, "allocation failed, returned null.");
341   } else {
342     UL2(trace, "after allocation: %u chunk(s), current:" METACHUNK_FULL_FORMAT,
343         _chunks.count(), METACHUNK_FULL_FORMAT_ARGS(current_chunk()));
344     UL2(trace, "returning " PTR_FORMAT ".", p2i(p));
345   }
346 
347   assert_is_aligned_metaspace_pointer(p);
348 
349   return p;
350 }
351 
 352 // Prematurely returns a metaspace allocation to the arena's free block list
 353 // because it is not needed anymore (requires the CLD lock to be held).
354 void MetaspaceArena::deallocate(MetaWord* p, size_t word_size) {
355   // At this point a current chunk must exist since we only deallocate if we did allocate before.
356   assert(current_chunk() != nullptr, "stray deallocation?");
357   assert(is_valid_area(p, word_size),
358          "Pointer range not part of this Arena and cannot be deallocated: (" PTR_FORMAT ".." PTR_FORMAT ").",
359          p2i(p), p2i(p + word_size));
360 
361   UL2(trace, "deallocating " PTR_FORMAT ", word size: " SIZE_FORMAT ".",
362       p2i(p), word_size);
363 
 364   // Only blocks that had been allocated via MetaspaceArena::allocate(size) may be handed in
 365   // to MetaspaceArena::deallocate(), and only with the same size that had originally been used for the allocation.
366   // Therefore the pointer must be aligned correctly, and size can be alignment-adjusted (the latter
367   // only matters on 32-bit):
368   assert_is_aligned_metaspace_pointer(p);
369   size_t raw_word_size = get_raw_word_size_for_requested_word_size(word_size);
370 
371   add_allocation_to_fbl(p, raw_word_size);
372 
373   SOMETIMES(verify();)
374 }
375 
376 // Update statistics. This walks all in-use chunks.
377 void MetaspaceArena::add_to_statistics(ArenaStats* out) const {
378   for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) {
379     InUseChunkStats& ucs = out->_stats[c->level()];
380     ucs._num++;
381     ucs._word_size += c->word_size();
382     ucs._committed_words += c->committed_words();
383     ucs._used_words += c->used_words();
384     // Note: for free and waste, we only count what's committed.
385     if (c == current_chunk()) {
386       ucs._free_words += c->free_below_committed_words();
387     } else {
388       ucs._waste_words += c->free_below_committed_words();
389     }
390   }
391 
392   if (_fbl != nullptr) {

422 void MetaspaceArena::verify() const {
423   assert(_growth_policy != nullptr && _chunk_manager != nullptr, "Sanity");
424   _chunks.verify();
425   if (_fbl != nullptr) {
426     _fbl->verify();
427   }
428 }
429 
430 void MetaspaceArena::Fence::verify() const {
431   assert(_eye1 == EyeCatcher && _eye2 == EyeCatcher,
432          "Metaspace corruption: fence block at " PTR_FORMAT " broken.", p2i(this));
433 }
434 
435 void MetaspaceArena::verify_allocation_guards() const {
436   assert(Settings::use_allocation_guard(), "Don't call with guards disabled.");
437   for (const Fence* f = _first_fence; f != nullptr; f = f->next()) {
438     f->verify();
439   }
440 }
441 
442 // Returns true if the area indicated by pointer and size have actually been allocated
443 // from this arena.
444 bool MetaspaceArena::is_valid_area(MetaWord* p, size_t word_size) const {
445   assert(p != nullptr && word_size > 0, "Sanity");
446   bool found = false;
447   for (const Metachunk* c = _chunks.first(); c != nullptr && !found; c = c->next()) {
448     assert(c->is_valid_committed_pointer(p) ==
449            c->is_valid_committed_pointer(p + word_size - 1), "range intersects");
450     found = c->is_valid_committed_pointer(p);
451   }
452   return found;
453 }
454 
455 #endif // ASSERT
456 
457 void MetaspaceArena::print_on(outputStream* st) const {
458   st->print_cr("sm %s: %d chunks, total word size: " SIZE_FORMAT ", committed word size: " SIZE_FORMAT, _name,
459                _chunks.count(), _chunks.calc_word_size(), _chunks.calc_committed_word_size());
460   _chunks.print_on(st);
461   st->cr();
462   st->print_cr("growth-policy " PTR_FORMAT ", cm " PTR_FORMAT ", fbl " PTR_FORMAT,
463                 p2i(_growth_policy), p2i(_chunk_manager), p2i(_fbl));
464 }
465 
466 } // namespace metaspace
467 

src/hotspot/share/memory/metaspace/metaspaceArena.cpp (new version)

 13  * version 2 for more details (a copy is included in the LICENSE file that
 14  * accompanied this code).
 15  *
 16  * You should have received a copy of the GNU General Public License version
 17  * 2 along with this work; if not, write to the Free Software Foundation,
 18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 19  *
 20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 21  * or visit www.oracle.com if you need additional information or have any
 22  * questions.
 23  *
 24  */
 25 
 26 #include "precompiled.hpp"
 27 #include "logging/log.hpp"
 28 #include "logging/logStream.hpp"
 29 #include "memory/metaspace/chunkManager.hpp"
 30 #include "memory/metaspace/counters.hpp"
 31 #include "memory/metaspace/freeBlocks.hpp"
 32 #include "memory/metaspace/internalStats.hpp"
 33 #include "memory/metaspace/metablock.inline.hpp"
 34 #include "memory/metaspace/metachunk.hpp"
 35 #include "memory/metaspace/metaspaceArena.hpp"
 36 #include "memory/metaspace/metaspaceArenaGrowthPolicy.hpp"
 37 #include "memory/metaspace/metaspaceCommon.hpp"
 38 #include "memory/metaspace/metaspaceContext.hpp"
 39 #include "memory/metaspace/metaspaceSettings.hpp"
 40 #include "memory/metaspace/metaspaceStatistics.hpp"
 41 #include "memory/metaspace/virtualSpaceList.hpp"
 42 #include "runtime/atomic.hpp"
 43 #include "runtime/init.hpp"
 44 #include "runtime/mutexLocker.hpp"
 45 #include "services/memoryService.hpp"
 46 #include "utilities/align.hpp"
 47 #include "utilities/debug.hpp"
 48 #include "utilities/globalDefinitions.hpp"
 49 
 50 namespace metaspace {
 51 
 52 #define LOGFMT         "Arena @" PTR_FORMAT " (%s)"
 53 #define LOGFMT_ARGS    p2i(this), this->_name
 54 
 55 // Returns the level of the next chunk to be added, according to the growth policy.
 56 chunklevel_t MetaspaceArena::next_chunk_level() const {
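    // The growth step is simply the number of chunks this arena already owns.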
 57   const int growth_step = _chunks.count();
 58   return _growth_policy->get_level_at_step(growth_step);
 59 }
 60 
 61 // Given a chunk, carve out its remaining committed free space and return it as a (possibly empty) MetaBlock.
 62 MetaBlock MetaspaceArena::salvage_chunk(Metachunk* c) {
 63   MetaBlock result;
 64   const size_t remaining_words = c->free_below_committed_words();
 65   if (remaining_words >= FreeBlocks::MinWordSize) {
 66 
 67     UL2(trace, "salvaging chunk " METACHUNK_FULL_FORMAT ".", METACHUNK_FULL_FORMAT_ARGS(c));
 68 
 69     MetaWord* ptr = c->allocate(remaining_words);
 70     assert(ptr != nullptr, "Should have worked");
 71 
 72     result = MetaBlock(ptr, remaining_words);
 73 
 74     // After this operation: the chunk should have no free committed space left.
 75     assert(c->free_below_committed_words() == 0,
 76            "Salvaging chunk failed (chunk " METACHUNK_FULL_FORMAT ").",
 77            METACHUNK_FULL_FORMAT_ARGS(c));
 78   }
 79   return result;
 80 }
 81 
 82 // Allocate a new chunk from the underlying chunk manager able to hold at least
 83 // requested word size.
 84 Metachunk* MetaspaceArena::allocate_new_chunk(size_t requested_word_size) {
 85   // Should this ever happen, we need to increase the maximum possible chunk size.
 86   guarantee(requested_word_size <= chunklevel::MAX_CHUNK_WORD_SIZE,
 87             "Requested size too large (" SIZE_FORMAT ") - max allowed size per allocation is " SIZE_FORMAT ".",
 88             requested_word_size, chunklevel::MAX_CHUNK_WORD_SIZE);
 89 
 90   const chunklevel_t max_level = chunklevel::level_fitting_word_size(requested_word_size);
 91   const chunklevel_t preferred_level = MIN2(max_level, next_chunk_level());
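      // Note: numerically lower chunk levels mean larger chunks. We prefer the growth policy's
      // level but cap it at max_level, so any chunk we get is large enough for the request.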
 92 
 93   Metachunk* c = _chunk_manager->get_chunk(preferred_level, max_level, requested_word_size);
 94   if (c == nullptr) {
 95     return nullptr;
 96   }
 97 
 98   assert(c->is_in_use(), "Wrong chunk state.");
 99   assert(c->free_below_committed_words() >= requested_word_size, "Chunk not committed");
100   return c;
101 }
102 
103 void MetaspaceArena::add_allocation_to_fbl(MetaBlock bl) {
104   assert(bl.is_nonempty(), "Sanity");
105   assert_block_base_aligned(bl, allocation_alignment_words());
106   assert_block_size_aligned(bl, Metaspace::min_allocation_alignment_words);
107   if (_fbl == nullptr) {
108     _fbl = new FreeBlocks(); // Create only on demand
109   }
110   _fbl->add_block(bl);
111 }
112 
113 MetaspaceArena::MetaspaceArena(MetaspaceContext* context,
114                const ArenaGrowthPolicy* growth_policy,
115                size_t allocation_alignment_words,
116                const char* name) :
117   _allocation_alignment_words(allocation_alignment_words),
118   _chunk_manager(context->cm()),
119   _growth_policy(growth_policy),
120   _chunks(),
121   _fbl(nullptr),
122   _total_used_words_counter(context->used_words_counter()),
123   _name(name)
124 #ifdef ASSERT
125   , _first_fence(nullptr)
126 #endif
127 {
128   // Check arena allocation alignment
129   assert(is_power_of_2(_allocation_alignment_words) &&
130          _allocation_alignment_words >= Metaspace::min_allocation_alignment_words &&
131          _allocation_alignment_words <= chunklevel::MIN_CHUNK_WORD_SIZE,
132          "Invalid alignment: %zu", _allocation_alignment_words);
133 
134   UL(debug, "born.");
135 
136   // Update statistics
137   InternalStats::inc_num_arena_births();
138 }
139 
140 MetaspaceArena::~MetaspaceArena() {
141 #ifdef ASSERT
142   SOMETIMES(verify();)
143   if (Settings::use_allocation_guard()) {
144     verify_allocation_guards();
145   }
146 #endif
147   MemRangeCounter return_counter;
148 
149   Metachunk* c = _chunks.first();
150   Metachunk* c2 = nullptr;
151 
152   while (c) {
153     c2 = c->next();
154     return_counter.add(c->used_words());
155     DEBUG_ONLY(c->set_prev(nullptr);)
156     DEBUG_ONLY(c->set_next(nullptr);)
157     UL2(debug, "return chunk: " METACHUNK_FORMAT ".", METACHUNK_FORMAT_ARGS(c));
158     _chunk_manager->return_chunk(c);
159     // c may be invalid after return_chunk(c) was called. Don't access anymore.
160     c = c2;
161   }
162 
163   UL2(debug, "returned %d chunks, total capacity " SIZE_FORMAT " words.",
164       return_counter.count(), return_counter.total_size());
165 
166   _total_used_words_counter->decrement_by(return_counter.total_size());
167   SOMETIMES(chunk_manager()->verify();)
168   delete _fbl;
169   UL(debug, ": dies.");
170 
171   // Update statistics
172   InternalStats::inc_num_arena_deaths();
173 }
174 
175 // Attempt to enlarge the current chunk to make it large enough to hold at least
176 //  requested_word_size additional words.
177 //
 178 // Returns true on success, false otherwise.
179 bool MetaspaceArena::attempt_enlarge_current_chunk(size_t requested_word_size) {
180   Metachunk* c = current_chunk();
181   assert(c->free_words() < requested_word_size, "Sanity");
182 
183   // Not if chunk enlargement is switched off...

208   if (!c->is_leader()) {
209     return false;
210   }
 211   // If the size added to the chunk would be larger than allowed for the next growth step,
 212   // don't enlarge.
213   if (next_chunk_level() > c->level()) {
214     return false;
215   }
216 
217   bool success = _chunk_manager->attempt_enlarge_chunk(c);
218   assert(success == false || c->free_words() >= requested_word_size, "Sanity");
219   return success;
220 }
221 
222 // Allocate memory from Metaspace.
223 // 1) Attempt to allocate from the free block list.
224 // 2) Attempt to allocate from the current chunk.
225 // 3) Attempt to enlarge the current chunk in place if it is too small.
226 // 4) Attempt to get a new chunk and allocate from that chunk.
227 // At any point, if we hit a commit limit, we return null.
228 MetaBlock MetaspaceArena::allocate(size_t requested_word_size, MetaBlock& wastage) {
229   UL2(trace, "requested " SIZE_FORMAT " words.", requested_word_size);
230 
231   const size_t aligned_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);
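      // aligned_word_size is the raw, alignment-adjusted size the arena actually hands out; all
      // bookkeeping below (free block list, wastage, usage counters) uses this raw size.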
232 
233   MetaBlock result;
234   bool taken_from_fbl = false;
235 
236   // Before bothering the arena proper, attempt to re-use a block from the free blocks list
237   if (_fbl != nullptr && !_fbl->is_empty()) {
238     result = _fbl->remove_block(aligned_word_size);
239     if (result.is_nonempty()) {
240       assert_block_larger_or_equal(result, aligned_word_size);
241       assert_block_base_aligned(result, allocation_alignment_words());
242       assert_block_size_aligned(result, Metaspace::min_allocation_alignment_words);
243       // Split off wastage
244       wastage = result.split_off_tail(result.word_size() - aligned_word_size);
245       // Stats, logging
246       DEBUG_ONLY(InternalStats::inc_num_allocs_from_deallocated_blocks();)
247       UL2(trace, "returning " METABLOCKFORMAT " with wastage " METABLOCKFORMAT " - taken from fbl (now: %d, " SIZE_FORMAT ").",
248           METABLOCKFORMATARGS(result), METABLOCKFORMATARGS(wastage), _fbl->count(), _fbl->total_size());
249       // Note: free blocks in freeblock dictionary still count as "used" as far as statistics go;
250       // therefore we don't need to adjust any usage counters (see epilogue of allocate_inner()).
251       taken_from_fbl = true;
252     }
253   }
254 
255   if (result.is_empty()) {
256     // Free-block allocation failed; we allocate from the arena.
257     // These allocations are fenced.
258     size_t plus_fence = 0;
259   #ifdef ASSERT
260     static constexpr size_t fence_word_size = sizeof(Fence) / BytesPerWord;
261     STATIC_ASSERT(is_aligned(fence_word_size, Metaspace::min_allocation_alignment_words));
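        // Only request the extra fence words if the combined size still fits within the
        // maximum possible allocation size.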
262     if (Settings::use_allocation_guard() &&
263         aligned_word_size <= Metaspace::max_allocation_word_size() - fence_word_size) {
264       plus_fence = fence_word_size;
265     }
266   #endif
267 
268     // Allocate from arena proper
269     result = allocate_inner(aligned_word_size + plus_fence, wastage);
270 
271   #ifdef ASSERT
272     if (result.is_nonempty() && plus_fence > 0) {
273       assert(result.word_size() == aligned_word_size + plus_fence, "Sanity");
274       MetaBlock fenceblock = result.split_off_tail(fence_word_size);
275       Fence* f = new(fenceblock.base()) Fence(_first_fence);
276       _first_fence = f;
277     }
278   #endif
279   } // End: allocate from arena proper
280 
281   // Logging
282   if (result.is_nonempty()) {
283     LogTarget(Trace, metaspace) lt;
284     if (lt.is_enabled()) {
285       LogStream ls(lt);
286       ls.print(LOGFMT ": returning " METABLOCKFORMAT " taken from %s, ", LOGFMT_ARGS,
287                METABLOCKFORMATARGS(result), (taken_from_fbl ? "fbl" : "arena"));
288       if (wastage.is_empty()) {
289         ls.print("no wastage");
290       } else {
291         ls.print("wastage " METABLOCKFORMAT, METABLOCKFORMATARGS(wastage));
292       }
293     }
294   } else {
295     UL(info, "allocation failed, returned null.");
296   }
297 
298   // Final sanity checks
 299 #ifdef ASSERT
 300   result.verify();
 301   wastage.verify();
 302   if (result.is_nonempty()) {
 303     assert(result.word_size() == aligned_word_size &&
 304            is_aligned(result.base(), _allocation_alignment_words * BytesPerWord),
 305            "result bad or unaligned: " METABLOCKFORMAT ".", METABLOCKFORMATARGS(result));
 306   }
 307   if (wastage.is_nonempty()) {
 308     assert(wastage.is_empty() ||
 309            (wastage.is_aligned_base(Metaspace::min_allocation_alignment_words) &&
 310             wastage.is_aligned_size(Metaspace::min_allocation_alignment_words)),
 311            "Misaligned wastage: " METABLOCKFORMAT ".", METABLOCKFORMATARGS(wastage));
 312   }
 313 #endif // ASSERT
314 
315   return result;
316 }
317 
318 // Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
319 MetaBlock MetaspaceArena::allocate_inner(size_t word_size, MetaBlock& wastage) {
320 
321   MetaBlock result;
322   bool current_chunk_too_small = false;
323   bool commit_failure = false;
324   size_t alignment_gap_size = 0;
325 
326   if (current_chunk() != nullptr) {
327     // Attempt to satisfy the allocation from the current chunk.
328 
329     const MetaWord* const chunk_top = current_chunk()->top();
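        // Number of filler words needed to bring the chunk top up to this arena's allocation
        // alignment; if nonzero, that gap is later handed back to the caller as wastage.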
330     alignment_gap_size = align_up(chunk_top, _allocation_alignment_words * BytesPerWord) - chunk_top;
331     const size_t word_size_plus_alignment = word_size + alignment_gap_size;
332 
333     // If the current chunk is too small to hold the requested size, attempt to enlarge it.
334     // If that fails, retire the chunk.
335     if (current_chunk()->free_words() < word_size_plus_alignment) {
336       if (!attempt_enlarge_current_chunk(word_size_plus_alignment)) {
337         current_chunk_too_small = true;
338       } else {
339         DEBUG_ONLY(InternalStats::inc_num_chunks_enlarged();)
340         UL(debug, "enlarged chunk.");
341       }
342     }
343 
344     // Commit the chunk far enough to hold the requested word size. If that fails, we
345     // hit a limit (either GC threshold or MaxMetaspaceSize). In that case retire the
346     // chunk.
347     if (!current_chunk_too_small) {
348       if (!current_chunk()->ensure_committed_additional(word_size_plus_alignment)) {
349         UL2(info, "commit failure (requested size: " SIZE_FORMAT ")", word_size_plus_alignment);
350         commit_failure = true;
351       }
352     }
353 
354     // Allocate from the current chunk. This should work now.
355     if (!current_chunk_too_small && !commit_failure) {
356       MetaWord* const p_gap = current_chunk()->allocate(word_size_plus_alignment);
357       assert(p_gap != nullptr, "Allocation from chunk failed.");
358       MetaWord* const p_user_allocation = p_gap + alignment_gap_size;
359       result = MetaBlock(p_user_allocation, word_size);
360       if (alignment_gap_size > 0) {
361         NOT_LP64(assert(alignment_gap_size >= AllocationAlignmentWordSize, "Sanity"));
362         wastage = MetaBlock(p_gap, alignment_gap_size);
363       }
364     }
365   }
366 
367   if (result.is_empty()) {
368     // If we are here, we either had no current chunk to begin with or it was deemed insufficient.
369     assert(current_chunk() == nullptr ||
370            current_chunk_too_small || commit_failure, "Sanity");
371 
372     Metachunk* new_chunk = allocate_new_chunk(word_size);
373     if (new_chunk != nullptr) {
374       UL2(debug, "allocated new chunk " METACHUNK_FORMAT " for requested word size " SIZE_FORMAT ".",
375           METACHUNK_FORMAT_ARGS(new_chunk), word_size);
376 
377       assert(new_chunk->free_below_committed_words() >= word_size, "Sanity");
378 
379       // We have a new chunk. Before making it the current chunk, retire the old one.
380       if (current_chunk() != nullptr) {
381         wastage = salvage_chunk(current_chunk());
382         DEBUG_ONLY(InternalStats::inc_num_chunks_retired();)
383       }
384 
385       _chunks.add(new_chunk);
386 
387       // Now, allocate from that chunk. That should work. Note that the resulting allocation
388       // is guaranteed to be aligned to arena alignment, since arena alignment cannot be larger
389       // than smallest chunk size, and chunk starts are aligned by their size (buddy allocation).
390       MetaWord* const p = current_chunk()->allocate(word_size);
391       assert(p != nullptr, "Allocation from chunk failed.");
392       result = MetaBlock(p, word_size);
393     } else {
394       UL2(info, "failed to allocate new chunk for requested word size " SIZE_FORMAT ".", word_size);
395     }
396   }
397 
398   if (result.is_empty()) {
399     InternalStats::inc_num_allocs_failed_limit();
400   } else {
401     DEBUG_ONLY(InternalStats::inc_num_allocs();)
402     _total_used_words_counter->increment_by(word_size + wastage.word_size());
403   }
404 
405   SOMETIMES(verify();)
406 
407   if (result.is_nonempty()) {
408     UL2(trace, "after allocation: %u chunk(s), current:" METACHUNK_FULL_FORMAT,
409         _chunks.count(), METACHUNK_FULL_FORMAT_ARGS(current_chunk()));
410   }
411 
412 #ifdef ASSERT
413   if (wastage.is_nonempty()) {
414     // Wastage from arena allocations only occurs if either or both are true:
415     // - it is too small to hold the requested allocation words
416     // - it is misaligned
417     assert(!wastage.is_aligned_base(allocation_alignment_words()) ||
418            wastage.word_size() < word_size,
419            "Unexpected wastage: " METABLOCKFORMAT ", arena alignment: %zu, allocation word size: %zu",
420            METABLOCKFORMATARGS(wastage), allocation_alignment_words(), word_size);
421     wastage.verify();
422   }
423 #endif // ASSERT
424 
425   return result;
426 }
427 
 428 // Prematurely returns a metaspace allocation to the arena's free block list
 429 // because it is not needed anymore (requires the CLD lock to be held).
430 void MetaspaceArena::deallocate(MetaBlock block) {
431   DEBUG_ONLY(block.verify();)
432   // This only matters on 32-bit:
433   // Since we always align up allocations from arena, we align up here, too.
434 #ifndef _LP64
435   MetaBlock raw_block(block.base(), get_raw_word_size_for_requested_word_size(block.word_size()));
436   add_allocation_to_fbl(raw_block);
437 #else
438   add_allocation_to_fbl(block);
439 #endif
440   UL2(trace, "added to fbl: " METABLOCKFORMAT ", (now: %d, " SIZE_FORMAT ").",
441       METABLOCKFORMATARGS(block), _fbl->count(), _fbl->total_size());
442   SOMETIMES(verify();)
443 }
444 
445 // Update statistics. This walks all in-use chunks.
446 void MetaspaceArena::add_to_statistics(ArenaStats* out) const {
447   for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) {
448     InUseChunkStats& ucs = out->_stats[c->level()];
449     ucs._num++;
450     ucs._word_size += c->word_size();
451     ucs._committed_words += c->committed_words();
452     ucs._used_words += c->used_words();
453     // Note: for free and waste, we only count what's committed.
454     if (c == current_chunk()) {
455       ucs._free_words += c->free_below_committed_words();
456     } else {
457       ucs._waste_words += c->free_below_committed_words();
458     }
459   }
460 
461   if (_fbl != nullptr) {

491 void MetaspaceArena::verify() const {
492   assert(_growth_policy != nullptr && _chunk_manager != nullptr, "Sanity");
493   _chunks.verify();
494   if (_fbl != nullptr) {
495     _fbl->verify();
496   }
497 }
498 
499 void MetaspaceArena::Fence::verify() const {
500   assert(_eye1 == EyeCatcher && _eye2 == EyeCatcher,
501          "Metaspace corruption: fence block at " PTR_FORMAT " broken.", p2i(this));
502 }
503 
504 void MetaspaceArena::verify_allocation_guards() const {
505   assert(Settings::use_allocation_guard(), "Don't call with guards disabled.");
506   for (const Fence* f = _first_fence; f != nullptr; f = f->next()) {
507     f->verify();
508   }
509 }
510 
 511 // Returns true if the given block is contained in this arena.
513 bool MetaspaceArena::contains(MetaBlock bl) const {
514   DEBUG_ONLY(bl.verify();)
515   assert(bl.is_nonempty(), "Sanity");
516   bool found = false;
517   for (const Metachunk* c = _chunks.first(); c != nullptr && !found; c = c->next()) {
518     assert(c->is_valid_committed_pointer(bl.base()) ==
519            c->is_valid_committed_pointer(bl.end() - 1), "range intersects");
520     found = c->is_valid_committed_pointer(bl.base());
521   }
522   return found;
523 }
524 
525 #endif // ASSERT
526 
527 void MetaspaceArena::print_on(outputStream* st) const {
528   st->print_cr("sm %s: %d chunks, total word size: " SIZE_FORMAT ", committed word size: " SIZE_FORMAT, _name,
529                _chunks.count(), _chunks.calc_word_size(), _chunks.calc_committed_word_size());
530   _chunks.print_on(st);
531   st->cr();
532   st->print_cr("growth-policy " PTR_FORMAT ", cm " PTR_FORMAT ", fbl " PTR_FORMAT,
533                 p2i(_growth_policy), p2i(_chunk_manager), p2i(_fbl));
534 }
535 
536 } // namespace metaspace
537 