1 /* 2 * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved. 3 * Copyright (c) 2020, 2023 SAP SE. All rights reserved. 4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 5 * 6 * This code is free software; you can redistribute it and/or modify it 7 * under the terms of the GNU General Public License version 2 only, as 8 * published by the Free Software Foundation. 9 * 10 * This code is distributed in the hope that it will be useful, but WITHOUT 11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 13 * version 2 for more details (a copy is included in the LICENSE file that 14 * accompanied this code). 15 * 16 * You should have received a copy of the GNU General Public License version 17 * 2 along with this work; if not, write to the Free Software Foundation, 18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 19 * 20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 21 * or visit www.oracle.com if you need additional information or have any 22 * questions. 
 *
 */

#include "precompiled.hpp"
#include "logging/log.hpp"
#include "logging/logStream.hpp"
#include "memory/metaspace/chunkManager.hpp"
#include "memory/metaspace/counters.hpp"
#include "memory/metaspace/freeBlocks.hpp"
#include "memory/metaspace/internalStats.hpp"
#include "memory/metaspace/metachunk.hpp"
#include "memory/metaspace/metaspaceArena.hpp"
#include "memory/metaspace/metaspaceArenaGrowthPolicy.hpp"
#include "memory/metaspace/metaspaceCommon.hpp"
#include "memory/metaspace/metaspaceSettings.hpp"
#include "memory/metaspace/metaspaceStatistics.hpp"
#include "memory/metaspace/virtualSpaceList.hpp"
#include "runtime/atomic.hpp"
#include "runtime/init.hpp"
#include "runtime/mutexLocker.hpp"
#include "services/memoryService.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"

namespace metaspace {

// Prefix used by the UL/UL2 logging macros: identifies this arena by address and name.
#define LOGFMT "Arena @" PTR_FORMAT " (%s)"
#define LOGFMT_ARGS p2i(this), this->_name

// Returns the level of the next chunk to be added, according to the growth policy.
// The growth step is simply the number of chunks this arena already holds.
chunklevel_t MetaspaceArena::next_chunk_level() const {
  const int growth_step = _chunks.count();
  return _growth_policy->get_level_at_step(growth_step);
}

// Given a chunk, add its remaining free committed space to the free block list.
// (Done before retiring a chunk so that its leftover committed space is not lost.)
void MetaspaceArena::salvage_chunk(Metachunk* c) {
  size_t remaining_words = c->free_below_committed_words();
  // Only bother if the leftover space is large enough to be tracked by the free block list.
  if (remaining_words >= FreeBlocks::MinWordSize) {

    UL2(trace, "salvaging chunk " METACHUNK_FULL_FORMAT ".", METACHUNK_FULL_FORMAT_ARGS(c));

    MetaWord* ptr = c->allocate(remaining_words);
    assert(ptr != nullptr, "Should have worked");
    // Space moved to the free block list still counts as "used" for statistics purposes.
    _total_used_words_counter->increment_by(remaining_words);

    add_allocation_to_fbl(ptr, remaining_words);

    // After this operation: the chunk should have no free committed space left.
    assert(c->free_below_committed_words() == 0,
           "Salvaging chunk failed (chunk " METACHUNK_FULL_FORMAT ").",
           METACHUNK_FULL_FORMAT_ARGS(c));
  }
}

// Allocate a new chunk from the underlying chunk manager able to hold at least
// requested word size. Returns nullptr if the chunk manager cannot satisfy the
// request (e.g. a commit limit was hit).
Metachunk* MetaspaceArena::allocate_new_chunk(size_t requested_word_size) {
  // Should this ever happen, we need to increase the maximum possible chunk size.
  guarantee(requested_word_size <= chunklevel::MAX_CHUNK_WORD_SIZE,
            "Requested size too large (" SIZE_FORMAT ") - max allowed size per allocation is " SIZE_FORMAT ".",
            requested_word_size, chunklevel::MAX_CHUNK_WORD_SIZE);

  // Preferred level follows the growth policy, but never smaller than what the
  // requested size needs (max_level is the smallest chunk fitting the request).
  const chunklevel_t max_level = chunklevel::level_fitting_word_size(requested_word_size);
  const chunklevel_t preferred_level = MIN2(max_level, next_chunk_level());

  Metachunk* c = _chunk_manager->get_chunk(preferred_level, max_level, requested_word_size);
  if (c == nullptr) {
    return nullptr;
  }

  assert(c->is_in_use(), "Wrong chunk state.");
  assert(c->free_below_committed_words() >= requested_word_size, "Chunk not committed");
  return c;
}

// Hand a memory area over to the free block list; the list itself is created
// lazily on first use.
void MetaspaceArena::add_allocation_to_fbl(MetaWord* p, size_t word_size) {
  assert(p != nullptr, "p is null");
  assert_is_aligned_metaspace_pointer(p);
  assert(word_size > 0, "zero sized");

  if (_fbl == nullptr) {
    _fbl = new FreeBlocks(); // Create only on demand
  }
  _fbl->add_block(p, word_size);
}

// Creates an arena drawing chunks from the given chunk manager, growing according
// to the given growth policy. Used words are reported to the given (shared) counter;
// name is used for logging only.
MetaspaceArena::MetaspaceArena(ChunkManager* chunk_manager, const ArenaGrowthPolicy* growth_policy,
                               SizeAtomicCounter* total_used_words_counter,
                               const char* name) :
  _chunk_manager(chunk_manager),
  _growth_policy(growth_policy),
  _chunks(),
  _fbl(nullptr),
  _total_used_words_counter(total_used_words_counter),
  _name(name)
#ifdef ASSERT
  , _first_fence(nullptr)
#endif
{
  UL(debug, ": born.");

  // Update statistics
  InternalStats::inc_num_arena_births();
}

// Returns all chunks to the chunk manager, deducts the returned capacity from the
// shared used-words counter and deletes the free block list.
MetaspaceArena::~MetaspaceArena() {
#ifdef ASSERT
  SOMETIMES(verify();)
  if (Settings::use_allocation_guard()) {
    verify_allocation_guards();
  }
#endif
  MemRangeCounter return_counter;

  Metachunk* c = _chunks.first();
  Metachunk* c2 = nullptr;

  while (c) {
    c2 = c->next();
    return_counter.add(c->used_words());
    DEBUG_ONLY(c->set_prev(nullptr);)
    DEBUG_ONLY(c->set_next(nullptr);)
    UL2(debug, "return chunk: " METACHUNK_FORMAT ".", METACHUNK_FORMAT_ARGS(c));
    _chunk_manager->return_chunk(c);
    // c may be invalid after return_chunk(c) was called. Don't access anymore.
    c = c2;
  }

  UL2(info, "returned %d chunks, total capacity " SIZE_FORMAT " words.",
      return_counter.count(), return_counter.total_size());

  _total_used_words_counter->decrement_by(return_counter.total_size());
  SOMETIMES(chunk_manager()->verify();)
  delete _fbl;
  UL(debug, ": dies.");

  // Update statistics
  InternalStats::inc_num_arena_deaths();
}

// Attempt to enlarge the current chunk to make it large enough to hold at least
// requested_word_size additional words.
//
// On success, true is returned, false otherwise.
bool MetaspaceArena::attempt_enlarge_current_chunk(size_t requested_word_size) {
  Metachunk* c = current_chunk();
  assert(c->free_words() < requested_word_size, "Sanity");

  // Not if chunk enlargement is switched off...
  if (Settings::enlarge_chunks_in_place() == false) {
    return false;
  }
  // ... nor if we are already a root chunk ...
  if (c->is_root_chunk()) {
    return false;
  }
  // ... nor if the combined size of chunk content and new content would bring us above the size of a root chunk ...
  if ((c->used_words() + requested_word_size) > metaspace::chunklevel::MAX_CHUNK_WORD_SIZE) {
    return false;
  }

  const chunklevel_t new_level =
    chunklevel::level_fitting_word_size(c->used_words() + requested_word_size);
  assert(new_level < c->level(), "Sanity");

  // Atm we only enlarge by one level (so, doubling the chunk in size). So, if the requested enlargement
  // would require the chunk to more than double in size, we bail. But this covers about 99% of all cases,
  // so this is good enough.
  if (new_level < c->level() - 1) {
    return false;
  }
  // This only works if chunk is the leader of its buddy pair (and also if buddy
  // is free and unsplit, but that we cannot check outside of metaspace lock).
  if (!c->is_leader()) {
    return false;
  }
  // If the size added to the chunk would be larger than allowed for the next growth step
  // don't enlarge.
  if (next_chunk_level() > c->level()) {
    return false;
  }

  bool success = _chunk_manager->attempt_enlarge_chunk(c);
  assert(success == false || c->free_words() >= requested_word_size, "Sanity");
  return success;
}

// Allocate memory from Metaspace.
// 1) Attempt to allocate from the free block list.
// 2) Attempt to allocate from the current chunk.
// 3) Attempt to enlarge the current chunk in place if it is too small.
// 4) Attempt to get a new chunk and allocate from that chunk.
// At any point, if we hit a commit limit, we return null.
MetaWord* MetaspaceArena::allocate(size_t requested_word_size) {
  UL2(trace, "requested " SIZE_FORMAT " words.", requested_word_size);

  MetaWord* p = nullptr;
  const size_t aligned_word_size = get_raw_word_size_for_requested_word_size(requested_word_size);

  // Before bothering the arena proper, attempt to re-use a block from the free blocks list
  if (_fbl != nullptr && !_fbl->is_empty()) {
    p = _fbl->remove_block(aligned_word_size);
    if (p != nullptr) {
      DEBUG_ONLY(InternalStats::inc_num_allocs_from_deallocated_blocks();)
      UL2(trace, "returning " PTR_FORMAT " - taken from fbl (now: %d, " SIZE_FORMAT ").",
          p2i(p), _fbl->count(), _fbl->total_size());
      assert_is_aligned_metaspace_pointer(p);
      // Note: free blocks in freeblock dictionary still count as "used" as far as statistics go;
      // therefore we have no need to adjust any usage counters (see epilogue of allocate_inner())
      // and can just return here.
      return p;
    }
  }

  // Primary allocation
  p = allocate_inner(aligned_word_size);

#ifdef ASSERT
  // Fence allocation
  if (p != nullptr && Settings::use_allocation_guard()) {
    STATIC_ASSERT(is_aligned(sizeof(Fence), BytesPerWord));
    MetaWord* guard = allocate_inner(sizeof(Fence) / BytesPerWord);
    if (guard != nullptr) {
      // Ignore allocation errors for the fence to keep coding simple. If this
      // happens (e.g. because right at this time we hit the Metaspace GC threshold)
      // we miss adding this one fence. Not a big deal. Note that this would
      // be pretty rare. Chances are much higher the primary allocation above
      // would have already failed).
      Fence* f = new(guard) Fence(_first_fence);
      _first_fence = f;
    }
  }
#endif // ASSERT

  return p;
}

// Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
MetaWord* MetaspaceArena::allocate_inner(size_t word_size) {
  assert_is_aligned(word_size, metaspace::AllocationAlignmentWordSize);

  MetaWord* p = nullptr;
  bool current_chunk_too_small = false;
  bool commit_failure = false;

  if (current_chunk() != nullptr) {

    // Attempt to satisfy the allocation from the current chunk.

    // If the current chunk is too small to hold the requested size, attempt to enlarge it.
    // If that fails, retire the chunk.
    if (current_chunk()->free_words() < word_size) {
      if (!attempt_enlarge_current_chunk(word_size)) {
        current_chunk_too_small = true;
      } else {
        DEBUG_ONLY(InternalStats::inc_num_chunks_enlarged();)
        UL(debug, "enlarged chunk.");
      }
    }

    // Commit the chunk far enough to hold the requested word size. If that fails, we
    // hit a limit (either GC threshold or MaxMetaspaceSize). In that case retire the
    // chunk.
    if (!current_chunk_too_small) {
      if (!current_chunk()->ensure_committed_additional(word_size)) {
        UL2(info, "commit failure (requested size: " SIZE_FORMAT ")", word_size);
        commit_failure = true;
      }
    }

    // Allocate from the current chunk. This should work now.
    if (!current_chunk_too_small && !commit_failure) {
      p = current_chunk()->allocate(word_size);
      assert(p != nullptr, "Allocation from chunk failed.");
    }
  }

  if (p == nullptr) {
    // If we are here, we either had no current chunk to begin with or it was deemed insufficient.
    assert(current_chunk() == nullptr ||
           current_chunk_too_small || commit_failure, "Sanity");

    Metachunk* new_chunk = allocate_new_chunk(word_size);
    if (new_chunk != nullptr) {
      UL2(debug, "allocated new chunk " METACHUNK_FORMAT " for requested word size " SIZE_FORMAT ".",
          METACHUNK_FORMAT_ARGS(new_chunk), word_size);

      assert(new_chunk->free_below_committed_words() >= word_size, "Sanity");

      // We have a new chunk. Before making it the current chunk, retire the old one.
      if (current_chunk() != nullptr) {
        salvage_chunk(current_chunk());
        DEBUG_ONLY(InternalStats::inc_num_chunks_retired();)
      }

      _chunks.add(new_chunk);

      // Now, allocate from that chunk. That should work.
      p = current_chunk()->allocate(word_size);
      assert(p != nullptr, "Allocation from chunk failed.");
    } else {
      UL2(info, "failed to allocate new chunk for requested word size " SIZE_FORMAT ".", word_size);
    }
  }

  // Epilogue: update counters on success, failure statistics otherwise.
  if (p == nullptr) {
    InternalStats::inc_num_allocs_failed_limit();
  } else {
    DEBUG_ONLY(InternalStats::inc_num_allocs();)
    _total_used_words_counter->increment_by(word_size);
  }

  SOMETIMES(verify();)

  if (p == nullptr) {
    UL(info, "allocation failed, returned null.");
  } else {
    UL2(trace, "after allocation: %u chunk(s), current:" METACHUNK_FULL_FORMAT,
        _chunks.count(), METACHUNK_FULL_FORMAT_ARGS(current_chunk()));
    UL2(trace, "returning " PTR_FORMAT ".", p2i(p));
  }

  assert_is_aligned_metaspace_pointer(p);

  return p;
}

// Prematurely returns a metaspace allocation to the _block_freelists
// because it is not needed anymore (requires CLD lock to be active).
void MetaspaceArena::deallocate(MetaWord* p, size_t word_size) {
  // At this point a current chunk must exist since we only deallocate if we did allocate before.
  assert(current_chunk() != nullptr, "stray deallocation?");
  assert(is_valid_area(p, word_size),
         "Pointer range not part of this Arena and cannot be deallocated: (" PTR_FORMAT ".." PTR_FORMAT ").",
         p2i(p), p2i(p + word_size));

  UL2(trace, "deallocating " PTR_FORMAT ", word size: " SIZE_FORMAT ".",
      p2i(p), word_size);

  // Only blocks that had been allocated via MetaspaceArena::allocate(size) must be handed in
  // to MetaspaceArena::deallocate(), and only with the same size that had been originally used for allocation.
  // Therefore the pointer must be aligned correctly, and size can be alignment-adjusted (the latter
  // only matters on 32-bit):
  assert_is_aligned_metaspace_pointer(p);
  size_t raw_word_size = get_raw_word_size_for_requested_word_size(word_size);

  add_allocation_to_fbl(p, raw_word_size);

  SOMETIMES(verify();)
}

// Update statistics. This walks all in-use chunks.
void MetaspaceArena::add_to_statistics(ArenaStats* out) const {
  for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) {
    // Per-level bucket for this chunk's numbers.
    InUseChunkStats& ucs = out->_stats[c->level()];
    ucs._num++;
    ucs._word_size += c->word_size();
    ucs._committed_words += c->committed_words();
    ucs._used_words += c->used_words();
    // Note: for free and waste, we only count what's committed.
    if (c == current_chunk()) {
      ucs._free_words += c->free_below_committed_words();
    } else {
      ucs._waste_words += c->free_below_committed_words();
    }
  }

  if (_fbl != nullptr) {
    out->_free_blocks_num += _fbl->count();
    out->_free_blocks_word_size += _fbl->total_size();
  }

  SOMETIMES(out->verify();)
}

// Convenience method to get the most important usage statistics.
// For deeper analysis use add_to_statistics().
402 void MetaspaceArena::usage_numbers(size_t* p_used_words, size_t* p_committed_words, size_t* p_capacity_words) const { 403 size_t used = 0, comm = 0, cap = 0; 404 for (const Metachunk* c = _chunks.first(); c != nullptr; c = c->next()) { 405 used += c->used_words(); 406 comm += c->committed_words(); 407 cap += c->word_size(); 408 } 409 if (p_used_words != nullptr) { 410 *p_used_words = used; 411 } 412 if (p_committed_words != nullptr) { 413 *p_committed_words = comm; 414 } 415 if (p_capacity_words != nullptr) { 416 *p_capacity_words = cap; 417 } 418 } 419 420 #ifdef ASSERT 421 422 void MetaspaceArena::verify() const { 423 assert(_growth_policy != nullptr && _chunk_manager != nullptr, "Sanity"); 424 _chunks.verify(); 425 if (_fbl != nullptr) { 426 _fbl->verify(); 427 } 428 } 429 430 void MetaspaceArena::Fence::verify() const { 431 assert(_eye1 == EyeCatcher && _eye2 == EyeCatcher, 432 "Metaspace corruption: fence block at " PTR_FORMAT " broken.", p2i(this)); 433 } 434 435 void MetaspaceArena::verify_allocation_guards() const { 436 assert(Settings::use_allocation_guard(), "Don't call with guards disabled."); 437 for (const Fence* f = _first_fence; f != nullptr; f = f->next()) { 438 f->verify(); 439 } 440 } 441 442 // Returns true if the area indicated by pointer and size have actually been allocated 443 // from this arena. 
444 bool MetaspaceArena::is_valid_area(MetaWord* p, size_t word_size) const { 445 assert(p != nullptr && word_size > 0, "Sanity"); 446 bool found = false; 447 for (const Metachunk* c = _chunks.first(); c != nullptr && !found; c = c->next()) { 448 assert(c->is_valid_committed_pointer(p) == 449 c->is_valid_committed_pointer(p + word_size - 1), "range intersects"); 450 found = c->is_valid_committed_pointer(p); 451 } 452 return found; 453 } 454 455 #endif // ASSERT 456 457 void MetaspaceArena::print_on(outputStream* st) const { 458 st->print_cr("sm %s: %d chunks, total word size: " SIZE_FORMAT ", committed word size: " SIZE_FORMAT, _name, 459 _chunks.count(), _chunks.calc_word_size(), _chunks.calc_committed_word_size()); 460 _chunks.print_on(st); 461 st->cr(); 462 st->print_cr("growth-policy " PTR_FORMAT ", cm " PTR_FORMAT ", fbl " PTR_FORMAT, 463 p2i(_growth_policy), p2i(_chunk_manager), p2i(_fbl)); 464 } 465 466 } // namespace metaspace 467