/*
 * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2020, 2023 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metaspace/chunkManager.hpp"
#include "memory/metaspace/counters.hpp"
#include "memory/metaspace/freeBlocks.hpp"
#include "memory/metaspace/internalStats.hpp"
#include "memory/metaspace/metablock.inline.hpp"
#include "memory/metaspace/metachunk.hpp"
#include "memory/metaspace/metaspaceArena.hpp"
#include "memory/metaspace/metaspaceArenaGrowthPolicy.hpp"
#include "memory/metaspace/metaspaceCommon.hpp"
#include "memory/metaspace/metaspaceContext.hpp"
#include "memory/metaspace/metaspaceSettings.hpp"
#include "memory/metaspace/metaspaceStatistics.hpp"
#include "memory/metaspace/virtualSpaceList.hpp"
#include "runtime/atomic.hpp"
#include "runtime/init.hpp"
#include "runtime/mutexLocker.hpp"
#include "services/memoryService.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"

namespace metaspace {

#define LOGFMT         "Arena @" PTR_FORMAT " (%s)"
#define LOGFMT_ARGS    p2i(this), this->_name

// Returns the level of the next chunk to be added, according to the growth policy.
chunklevel_t MetaspaceArena::next_chunk_level() const {
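  // Note: the growth step is simply the number of chunks the arena already owns, so the
  // policy can hand out progressively larger chunks as the arena fills up (a numerically
  // lower chunk level denotes a larger chunk).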
  const int growth_step = _chunks.count();
  return _growth_policy->get_level_at_step(growth_step);
}

// Given a chunk, return the committed remainder of this chunk.
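// The remainder is carved out as a single MetaBlock and handed back to the caller (which
// typically treats it as wastage and feeds it to the free block list). Remainders smaller
// than FreeBlocks::MinWordSize are not worth tracking and are simply left in the chunk.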
MetaBlock MetaspaceArena::salvage_chunk(Metachunk* c) {
  MetaBlock result;
  const size_t remaining_words = c->free_below_committed_words();
  if (remaining_words >= FreeBlocks::MinWordSize) {

    UL2(trace, "salvaging chunk " METACHUNK_FULL_FORMAT ".", METACHUNK_FULL_FORMAT_ARGS(c));

    MetaWord* ptr = c->allocate(remaining_words);
    assert(ptr != nullptr, "Should have worked");

    result = MetaBlock(ptr, remaining_words);

    // After this operation: the chunk should have no free committed space left.
    assert(c->free_below_committed_words() == 0,
           "Salvaging chunk failed (chunk " METACHUNK_FULL_FORMAT ").",
           METACHUNK_FULL_FORMAT_ARGS(c));
  }
  return result;
}

// Allocate a new chunk from the underlying chunk manager that is able to hold at least
// the requested word size.
Metachunk* MetaspaceArena::allocate_new_chunk(size_t requested_word_size) {
  // Should this ever happen, we need to increase the maximum possible chunk size.
  guarantee(requested_word_size <= chunklevel::MAX_CHUNK_WORD_SIZE,
            "Requested size too large (" SIZE_FORMAT ") - max allowed size per allocation is " SIZE_FORMAT ".",
            requested_word_size, chunklevel::MAX_CHUNK_WORD_SIZE);

  const chunklevel_t max_level = chunklevel::level_fitting_word_size(requested_word_size);
  const chunklevel_t preferred_level = MIN2(max_level, next_chunk_level());
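  // max_level is the smallest chunk that still fits the request; preferred_level follows the
  // growth policy but is never smaller than what is needed to fit the request (remember: a
  // lower level means a larger chunk). The chunk manager may return any level in between.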

  Metachunk* c = _chunk_manager->get_chunk(preferred_level, max_level, requested_word_size);
  if (c == nullptr) {
    return nullptr;
  }

  assert(c->is_in_use(), "Wrong chunk state.");
  assert(c->free_below_committed_words() >= requested_word_size, "Chunk not committed");
  return c;
}

void MetaspaceArena::add_allocation_to_fbl(MetaBlock bl) {
  assert(bl.is_nonempty(), "Sanity");
  assert_block_base_aligned(bl, allocation_alignment_words());
  assert_block_size_aligned(bl, Metaspace::min_allocation_alignment_words);
  if (_fbl == nullptr) {
    _fbl = new FreeBlocks(); // Create only on demand
  }
  _fbl->add_block(bl);
}

MetaspaceArena::MetaspaceArena(MetaspaceContext* context,
               const ArenaGrowthPolicy* growth_policy,
               size_t allocation_alignment_words,
               const char* name) :
  _allocation_alignment_words(allocation_alignment_words),
  _chunk_manager(context->cm()),
  _growth_policy(growth_policy),
  _chunks(),
  _fbl(nullptr),
  _total_used_words_counter(context->used_words_counter()),
  _name(name)
#ifdef ASSERT
  , _first_fence(nullptr)
#endif
{
  // Check arena allocation alignment
  assert(is_power_of_2(_allocation_alignment_words) &&
         _allocation_alignment_words >= Metaspace::min_allocation_alignment_words &&
         _allocation_alignment_words <= chunklevel::MIN_CHUNK_WORD_SIZE,
         "Invalid alignment: %zu", _allocation_alignment_words);

  UL(debug, "born.");

  // Update statistics
  InternalStats::inc_num_arena_births();
}

MetaspaceArena::~MetaspaceArena() {
#ifdef ASSERT
  SOMETIMES(verify();)
  if (Settings::use_allocation_guard()) {
    verify_allocation_guards();
  }
#endif
  MemRangeCounter return_counter;

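  // Return all chunks to the chunk manager, which puts them back into its free lists
  // (possibly merging them with their free buddies along the way).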
  Metachunk* c = _chunks.first();
  Metachunk* c2 = nullptr;

  while (c) {
    c2 = c->next();
    return_counter.add(c->used_words());
    DEBUG_ONLY(c->set_prev(nullptr);)
    DEBUG_ONLY(c->set_next(nullptr);)
    UL2(debug, "return chunk: " METACHUNK_FORMAT ".", METACHUNK_FORMAT_ARGS(c));
    _chunk_manager->return_chunk(c);
    // c may be invalid after return_chunk(c) was called; don't access it anymore.
    c = c2;
  }

  UL2(debug, "returned %d chunks, total capacity " SIZE_FORMAT " words.",
      return_counter.count(), return_counter.total_size());

  _total_used_words_counter->decrement_by(return_counter.total_size());
  SOMETIMES(chunk_manager()->verify();)
  delete _fbl;
  UL(debug, ": dies.");

  // Update statistics
  InternalStats::inc_num_arena_deaths();
}

// Attempt to enlarge the current chunk to make it large enough to hold at least
// requested_word_size additional words.
//
// Returns true on success, false otherwise.
bool MetaspaceArena::attempt_enlarge_current_chunk(size_t requested_word_size) {
  Metachunk* c = current_chunk();
  assert(c->free_words() < requested_word_size, "Sanity");

  // Not if chunk enlargement is switched off...
  if (Settings::enlarge_chunks_in_place() == false) {
    return false;
  }
  // ... nor if we are already a root chunk ...
  if (c->is_root_chunk()) {
    return false;
  }
  // ... nor if the combined size of chunk content and new content would bring us above the size of a root chunk ...
  if ((c->used_words() + requested_word_size) > metaspace::chunklevel::MAX_CHUNK_WORD_SIZE) {
    return false;
  }

  const chunklevel_t new_level =
      chunklevel::level_fitting_word_size(c->used_words() + requested_word_size);
  assert(new_level < c->level(), "Sanity");

  // At the moment we only enlarge by one level (i.e. doubling the chunk in size). So, if the requested
  // enlargement would require the chunk to more than double in size, we bail. But this covers about 99%
  // of all cases, so it is good enough.
  if (new_level < c->level() - 1) {
    return false;
  }
  // This only works if the chunk is the leader of its buddy pair (and also if the buddy
  // is free and unsplit, but that cannot be checked outside of the metaspace lock).
  if (!c->is_leader()) {
    return false;
  }
  // If the size added to the chunk would be larger than allowed for the next growth step,
  // don't enlarge.
  if (next_chunk_level() > c->level()) {
    return false;
  }

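  // Note: on success the chunk is merged in place with its free, unsplit buddy; its base
  // address stays the same, its level decreases by one and its word size doubles.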
  bool success = _chunk_manager->attempt_enlarge_chunk(c);
  assert(success == false || c->free_words() >= requested_word_size, "Sanity");
  return success;
}

// Allocate memory from Metaspace.
// 1) Attempt to allocate from the free block list.
// 2) Attempt to allocate from the current chunk.
// 3) Attempt to enlarge the current chunk in place if it is too small.
// 4) Attempt to get a new chunk and allocate from that chunk.
// At any point, if we hit a commit limit, we return an empty block.
MetaBlock MetaspaceArena::allocate(size_t requested_word_size, MetaBlock& wastage) {
  UL2(trace, "requested " SIZE_FORMAT " words.", requested_word_size);

  const size_t aligned_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);
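  // Note: the "raw" size is the requested size padded up to the internal allocation granularity,
  // so that, among other things, the block can later be managed by the free block list if it
  // ever gets deallocated.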

  MetaBlock result;
  bool taken_from_fbl = false;

  // Before bothering the arena proper, attempt to re-use a block from the free blocks list
  if (_fbl != nullptr && !_fbl->is_empty()) {
    result = _fbl->remove_block(aligned_word_size);
    if (result.is_nonempty()) {
      assert_block_larger_or_equal(result, aligned_word_size);
      assert_block_base_aligned(result, allocation_alignment_words());
      assert_block_size_aligned(result, Metaspace::min_allocation_alignment_words);
      // Split off wastage
      wastage = result.split_off_tail(result.word_size() - aligned_word_size);
      // Stats, logging
      DEBUG_ONLY(InternalStats::inc_num_allocs_from_deallocated_blocks();)
      UL2(trace, "returning " METABLOCKFORMAT " with wastage " METABLOCKFORMAT " - taken from fbl (now: %d, " SIZE_FORMAT ").",
          METABLOCKFORMATARGS(result), METABLOCKFORMATARGS(wastage), _fbl->count(), _fbl->total_size());
      // Note: free blocks in freeblock dictionary still count as "used" as far as statistics go;
      // therefore we don't need to adjust any usage counters (see epilogue of allocate_inner()).
      taken_from_fbl = true;
    }
  }

  if (result.is_empty()) {
    // Free-block allocation failed; we allocate from the arena proper.
    // In debug builds with allocation guards enabled, these allocations are fenced.
    size_t plus_fence = 0;
  #ifdef ASSERT
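    // The fence is a small canary object placed directly behind the user allocation; the arena
    // keeps all fences in a linked list so verify_allocation_guards() can later check their
    // eye-catcher patterns for overwrites.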
    static constexpr size_t fence_word_size = sizeof(Fence) / BytesPerWord;
    STATIC_ASSERT(is_aligned(fence_word_size, Metaspace::min_allocation_alignment_words));
    if (Settings::use_allocation_guard() &&
        aligned_word_size <= Metaspace::max_allocation_word_size() - fence_word_size) {
      plus_fence = fence_word_size;
    }
  #endif

    // Allocate from arena proper
    result = allocate_inner(aligned_word_size + plus_fence, wastage);

  #ifdef ASSERT
    if (result.is_nonempty() && plus_fence > 0) {
      assert(result.word_size() == aligned_word_size + plus_fence, "Sanity");
      MetaBlock fenceblock = result.split_off_tail(fence_word_size);
      Fence* f = new(fenceblock.base()) Fence(_first_fence);
      _first_fence = f;
    }
  #endif
  } // End: allocate from arena proper

  // Logging
  if (result.is_nonempty()) {
    LogTarget(Trace, metaspace) lt;
    if (lt.is_enabled()) {
      LogStream ls(lt);
      ls.print(LOGFMT ": returning " METABLOCKFORMAT " taken from %s, ", LOGFMT_ARGS,
               METABLOCKFORMATARGS(result), (taken_from_fbl ? "fbl" : "arena"));
      if (wastage.is_empty()) {
        ls.print("no wastage");
      } else {
        ls.print("wastage " METABLOCKFORMAT, METABLOCKFORMATARGS(wastage));
      }
    }
  } else {
    UL(info, "allocation failed, returned null.");
  }

  // Final sanity checks
#ifdef ASSERT
  result.verify();
  wastage.verify();
  if (result.is_nonempty()) {
    assert(result.word_size() == aligned_word_size &&
           is_aligned(result.base(), _allocation_alignment_words * BytesPerWord),
           "result bad or unaligned: " METABLOCKFORMAT ".", METABLOCKFORMATARGS(result));
  }
  if (wastage.is_nonempty()) {
    assert(wastage.is_empty() ||
           (wastage.is_aligned_base(Metaspace::min_allocation_alignment_words) &&
            wastage.is_aligned_size(Metaspace::min_allocation_alignment_words)),
           "Misaligned wastage: " METABLOCKFORMAT ".", METABLOCKFORMATARGS(wastage));
  }
#endif // ASSERT

  return result;
}

// Allocate from the arena proper, after free-block-list allocation and fencing have been
// taken care of by the caller.
MetaBlock MetaspaceArena::allocate_inner(size_t word_size, MetaBlock& wastage) {

  MetaBlock result;
  bool current_chunk_too_small = false;
  bool commit_failure = false;
  size_t alignment_gap_size = 0;

  if (current_chunk() != nullptr) {
    // Attempt to satisfy the allocation from the current chunk.

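    // The chunk top may not be aligned to the arena's allocation alignment; if so, we
    // over-allocate by the size of the gap and hand the gap back to the caller as wastage
    // further down.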
    const MetaWord* const chunk_top = current_chunk()->top();
    alignment_gap_size = align_up(chunk_top, _allocation_alignment_words * BytesPerWord) - chunk_top;
    const size_t word_size_plus_alignment = word_size + alignment_gap_size;

    // If the current chunk is too small to hold the requested size, attempt to enlarge it.
    // If that fails, retire the chunk.
    if (current_chunk()->free_words() < word_size_plus_alignment) {
      if (!attempt_enlarge_current_chunk(word_size_plus_alignment)) {
        current_chunk_too_small = true;
      } else {
        DEBUG_ONLY(InternalStats::inc_num_chunks_enlarged();)
        UL(debug, "enlarged chunk.");
      }
    }

    // Commit the chunk far enough to hold the requested word size. If that fails, we
    // hit a limit (either GC threshold or MaxMetaspaceSize). In that case retire the
    // chunk.
    if (!current_chunk_too_small) {
      if (!current_chunk()->ensure_committed_additional(word_size_plus_alignment)) {
        UL2(info, "commit failure (requested size: " SIZE_FORMAT ")", word_size_plus_alignment);
        commit_failure = true;
      }
    }

    // Allocate from the current chunk. This should work now.
    if (!current_chunk_too_small && !commit_failure) {
      MetaWord* const p_gap = current_chunk()->allocate(word_size_plus_alignment);
      assert(p_gap != nullptr, "Allocation from chunk failed.");
      MetaWord* const p_user_allocation = p_gap + alignment_gap_size;
      result = MetaBlock(p_user_allocation, word_size);
      if (alignment_gap_size > 0) {
        NOT_LP64(assert(alignment_gap_size >= AllocationAlignmentWordSize, "Sanity"));
        wastage = MetaBlock(p_gap, alignment_gap_size);
      }
    }
  }

  if (result.is_empty()) {
    // If we are here, we either had no current chunk to begin with or it was deemed insufficient.
    assert(current_chunk() == nullptr ||
           current_chunk_too_small || commit_failure, "Sanity");

    Metachunk* new_chunk = allocate_new_chunk(word_size);
    if (new_chunk != nullptr) {
      UL2(debug, "allocated new chunk " METACHUNK_FORMAT " for requested word size " SIZE_FORMAT ".",
          METACHUNK_FORMAT_ARGS(new_chunk), word_size);

      assert(new_chunk->free_below_committed_words() >= word_size, "Sanity");

      // We have a new chunk. Before making it the current chunk, retire the old one.
      if (current_chunk() != nullptr) {
        wastage = salvage_chunk(current_chunk());
        DEBUG_ONLY(InternalStats::inc_num_chunks_retired();)
      }

      _chunks.add(new_chunk);

      // Now, allocate from that chunk. That should work. Note that the resulting allocation
      // is guaranteed to be aligned to arena alignment, since arena alignment cannot be larger
      // than the smallest chunk size, and chunk starts are aligned by their size (buddy allocation).
      MetaWord* const p = current_chunk()->allocate(word_size);
      assert(p != nullptr, "Allocation from chunk failed.");
      result = MetaBlock(p, word_size);
    } else {
      UL2(info, "failed to allocate new chunk for requested word size " SIZE_FORMAT ".", word_size);
    }
  }

  if (result.is_empty()) {
    InternalStats::inc_num_allocs_failed_limit();
  } else {
    DEBUG_ONLY(InternalStats::inc_num_allocs();)
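    // Note that wastage is accounted as used here; if the caller later feeds it to the free
    // block list, it stays "used" as far as the usage counters are concerned.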
    _total_used_words_counter->increment_by(word_size + wastage.word_size());
  }

  SOMETIMES(verify();)

  if (result.is_nonempty()) {
    UL2(trace, "after allocation: %u chunk(s), current:" METACHUNK_FULL_FORMAT,
        _chunks.count(), METACHUNK_FULL_FORMAT_ARGS(current_chunk()));
  }

#ifdef ASSERT
  if (wastage.is_nonempty()) {
    // Wastage from arena allocations only occurs if either or both of these are true:
    // - the block is too small to hold the requested allocation words
    // - the block is misaligned
    assert(!wastage.is_aligned_base(allocation_alignment_words()) ||
           wastage.word_size() < word_size,
           "Unexpected wastage: " METABLOCKFORMAT ", arena alignment: %zu, allocation word size: %zu",
           METABLOCKFORMATARGS(wastage), allocation_alignment_words(), word_size);
    wastage.verify();
  }
#endif // ASSERT

  return result;
}

// Prematurely returns a metaspace allocation to the arena's free block list
// because it is not needed anymore (requires the CLD lock to be held).
void MetaspaceArena::deallocate(MetaBlock block) {
  DEBUG_ONLY(block.verify();)
  // This only matters on 32-bit:
  // Since we always align up allocations from the arena, we align up here, too.
#ifndef _LP64
  MetaBlock raw_block(block.base(), get_raw_word_size_for_requested_word_size(block.word_size()));
  add_allocation_to_fbl(raw_block);
#else
  add_allocation_to_fbl(block);
#endif
  UL2(trace, "added to fbl: " METABLOCKFORMAT ", (now: %d, " SIZE_FORMAT ").",
      METABLOCKFORMATARGS(block), _fbl->count(), _fbl->total_size());
  SOMETIMES(verify();)
}

// Update statistics. This walks all in-use chunks.
void MetaspaceArena::add_to_statistics(ArenaStats* out) const {
  for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) {
    InUseChunkStats& ucs = out->_stats[c->level()];
    ucs._num++;
    ucs._word_size += c->word_size();
    ucs._committed_words += c->committed_words();
    ucs._used_words += c->used_words();
    // Note: for free and waste, we only count what's committed.
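    // Only the current chunk can still serve allocations, so its committed-but-unused space
    // counts as "free"; the same space in retired chunks can no longer be handed out and
    // therefore counts as "waste".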
    if (c == current_chunk()) {
      ucs._free_words += c->free_below_committed_words();
    } else {
      ucs._waste_words += c->free_below_committed_words();
    }
  }

  if (_fbl != nullptr) {
    out->_free_blocks_num += _fbl->count();
    out->_free_blocks_word_size += _fbl->total_size();
  }

  SOMETIMES(out->verify();)
}

// Convenience method to get the most important usage statistics.
// For deeper analysis use add_to_statistics().
void MetaspaceArena::usage_numbers(size_t* p_used_words, size_t* p_committed_words, size_t* p_capacity_words) const {
  size_t used = 0, comm = 0, cap = 0;
  for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) {
    used += c->used_words();
    comm += c->committed_words();
    cap += c->word_size();
  }
  if (p_used_words != nullptr) {
    *p_used_words = used;
  }
  if (p_committed_words != nullptr) {
    *p_committed_words = comm;
  }
  if (p_capacity_words != nullptr) {
    *p_capacity_words = cap;
  }
}

#ifdef ASSERT

void MetaspaceArena::verify() const {
  assert(_growth_policy != nullptr && _chunk_manager != nullptr, "Sanity");
  _chunks.verify();
  if (_fbl != nullptr) {
    _fbl->verify();
  }
}

void MetaspaceArena::Fence::verify() const {
  assert(_eye1 == EyeCatcher && _eye2 == EyeCatcher,
         "Metaspace corruption: fence block at " PTR_FORMAT " broken.", p2i(this));
}

void MetaspaceArena::verify_allocation_guards() const {
  assert(Settings::use_allocation_guard(), "Don't call with guards disabled.");
  for (const Fence* f = _first_fence; f != nullptr; f = f->next()) {
    f->verify();
  }
}

// Returns true if the given block is contained in this arena
bool MetaspaceArena::contains(MetaBlock bl) const {
  DEBUG_ONLY(bl.verify();)
  assert(bl.is_nonempty(), "Sanity");
  bool found = false;
  for (const Metachunk* c = _chunks.first(); c != nullptr && !found; c = c->next()) {
    assert(c->is_valid_committed_pointer(bl.base()) ==
           c->is_valid_committed_pointer(bl.end() - 1), "range intersects");
    found = c->is_valid_committed_pointer(bl.base());
  }
  return found;
}

#endif // ASSERT

void MetaspaceArena::print_on(outputStream* st) const {
  st->print_cr("sm %s: %d chunks, total word size: " SIZE_FORMAT ", committed word size: " SIZE_FORMAT, _name,
               _chunks.count(), _chunks.calc_word_size(), _chunks.calc_committed_word_size());
  _chunks.print_on(st);
  st->cr();
  st->print_cr("growth-policy " PTR_FORMAT ", cm " PTR_FORMAT ", fbl " PTR_FORMAT,
                p2i(_growth_policy), p2i(_chunk_manager), p2i(_fbl));
}

} // namespace metaspace