/*
 * Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2020 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_MEMORY_METASPACE_METASPACEARENA_HPP
#define SHARE_MEMORY_METASPACE_METASPACEARENA_HPP

#include "memory/allocation.hpp"
#include "memory/metaspace.hpp"
#include "memory/metaspace/counters.hpp"
#include "memory/metaspace/metachunkList.hpp"

class outputStream;
class Mutex;

namespace metaspace {

class ArenaGrowthPolicy;
class ChunkManager;
class Metachunk;
class FreeBlocks;

struct ArenaStats;
// The MetaspaceArena is a growable metaspace memory pool belonging to a CLD;
//  internally it consists of a list of metaspace chunks, of which the head chunk
//  is the current chunk from which we allocate via pointer bump.
//
//  +---------------+
//  |     Arena     |
//  +---------------+
//            |
//            | _chunks                                               commit top
//            |                                                       v
//        +----------+      +----------+      +----------+      +----------+
//        | retired  | ---> | retired  | ---> | retired  | ---> | current  |
//        | chunk    |      | chunk    |      | chunk    |      | chunk    |
//        +----------+      +----------+      +----------+      +----------+
//                                                                  ^
//                                                                  used top
//
//        +------------+
//        | FreeBlocks | --> O -> O -> O -> O
//        +------------+
//
//

// When the current chunk is used up, MetaspaceArena requests a new chunk from
//  the associated ChunkManager.
//
// MetaspaceArena also keeps a FreeBlocks structure to manage memory blocks which
//  had been deallocated prematurely.
//
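// As a minimal conceptual sketch, "pointer bump" allocation from the current chunk
//  works roughly like this (the member names below are illustrative only, not the
//  actual Metachunk interface):
//
//    MetaWord* chunk_allocate(Metachunk* c, size_t word_size) {
//      if (c->used_top + word_size > c->commit_top) {
//        return nullptr;              // not enough committed space left in this chunk
//      }
//      MetaWord* p = c->used_top;
//      c->used_top += word_size;      // bump the top pointer; no per-block headers
//      return p;
//    }
//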
class MetaspaceArena : public CHeapObj<mtClass> {

  // Please note that access to a metaspace arena may be shared between threads
  // and needs to be synchronized by the owning CLMS (ClassLoaderMetaspace).
  // Reference to the chunk manager to allocate chunks from.
  ChunkManager* const _chunk_manager;

  // Reference to the growth policy to use.
  const ArenaGrowthPolicy* const _growth_policy;

  // List of chunks. Head of the list is the current chunk.
  MetachunkList _chunks;

  // Structure to take care of leftover/deallocated space in used chunks.
  // Owned by the Arena. Gets allocated on demand only.
  FreeBlocks* _fbl;

  Metachunk* current_chunk()              { return _chunks.first(); }
  const Metachunk* current_chunk() const  { return _chunks.first(); }

  // Reference to an outside counter to keep track of used space.
  SizeAtomicCounter* const _total_used_words_counter;

  // A name for purely debugging/logging purposes.
  const char* const _name;

#ifdef ASSERT
  // Allocation guards: When active, arena allocations are interleaved with
  //  fence allocations. An overwritten fence indicates a buffer overrun in either
  //  the preceding or the following user block. All fences are linked together;
  //  validating the fences just means walking that linked list.
  // Note that for the Arena, fence blocks are just another form of user blocks.
  class Fence {
    static const uintx EyeCatcher =
      NOT_LP64(0x77698465) LP64_ONLY(0x7769846577698465ULL); // "META" resp "METAMETA"
    // Two eyecatchers to easily spot a corrupted _next pointer
    const uintx _eye1;
    const Fence* const _next;
    NOT_LP64(uintx _dummy;)
    const uintx _eye2;
  public:
    Fence(const Fence* next) : _eye1(EyeCatcher), _next(next), _eye2(EyeCatcher) {}
    const Fence* next() const { return _next; }
    void verify() const;
  };
  const Fence* _first_fence;
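  // As an illustrative sketch, allocations and fences interleave like this when
  //  guards are active (fences form a singly linked list starting at _first_fence):
  //
  //    ... | user block | Fence | user block | Fence | user block | ...
  //
  //  Verification then amounts to walking that list; roughly (a sketch, not
  //  necessarily the literal implementation of verify_allocation_guards()):
  //
  //    for (const Fence* f = _first_fence; f != nullptr; f = f->next()) {
  //      f->verify();   // asserts that both eyecatchers are still intact
  //    }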
#endif // ASSERT

  ChunkManager* chunk_manager() const           { return _chunk_manager; }

  // free block list
  FreeBlocks* fbl() const                       { return _fbl; }
  void add_allocation_to_fbl(MetaWord* p, size_t word_size);

  // Given a chunk, add its remaining free committed space to the free block list.
  void salvage_chunk(Metachunk* c);

  // Allocate a new chunk from the underlying chunk manager able to hold at least
  // the requested word size.
  Metachunk* allocate_new_chunk(size_t requested_word_size);
  // Returns the level of the next chunk to be added, according to the growth policy.
  chunklevel_t next_chunk_level() const;

  // Attempt to enlarge the current chunk in place to make it large enough to hold
  //  at least requested_word_size additional words.
  //
  // Returns true on success, false otherwise.
  bool attempt_enlarge_current_chunk(size_t requested_word_size);

  // Returns true if the area indicated by pointer and size has actually been allocated
  // from this arena.
  DEBUG_ONLY(bool is_valid_area(MetaWord* p, size_t word_size) const;)

  // Allocate from the arena proper, once dictionary allocations and fencing are sorted out.
  MetaWord* allocate_inner(size_t word_size);

public:

  MetaspaceArena(ChunkManager* chunk_manager, const ArenaGrowthPolicy* growth_policy,
                 SizeAtomicCounter* total_used_words_counter,
                 const char* name);

  ~MetaspaceArena();

  // Allocate memory from Metaspace.
  // 1) Attempt to allocate from the dictionary of deallocated blocks.
  // 2) Attempt to allocate from the current chunk.
  // 3) Attempt to enlarge the current chunk in place if it is too small.
  // 4) Attempt to get a new chunk and allocate from that chunk.
  // At any point, if we hit a commit limit, we return null.
  MetaWord* allocate(size_t word_size);
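  // Roughly, as illustrative pseudocode (not the literal implementation):
  //
  //   allocate(word_size):
  //     if _fbl holds a fitting deallocated block     -> return it
  //     if the current chunk can satisfy the request  -> pointer-bump allocate
  //     if the current chunk can be enlarged in place -> enlarge, then pointer-bump allocate
  //     otherwise request a new chunk from the ChunkManager,
  //       salvage the old current chunk's leftover space into the FreeBlocks list,
  //       and allocate from the new chunk
  //     return null if a commit limit was hit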

  // Prematurely returns a metaspace allocation to the arena's free block list (_fbl)
  // because it is not needed anymore.
  void deallocate(MetaWord* p, size_t word_size);
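  // A minimal usage sketch (the chunk manager and growth policy are assumed to be
  //  obtained elsewhere; names and word sizes are illustrative only):
  //
  //   SizeAtomicCounter used_words;
  //   MetaspaceArena* arena = new MetaspaceArena(cm, policy, &used_words, "example-arena");
  //   MetaWord* p = arena->allocate(128);     // 128 words; returns null on commit limit
  //   if (p != nullptr) {
  //     arena->deallocate(p, 128);            // hand the block back to the free block list
  //   }
  //   delete arena;                           // releases the arena's chunks
  //
  //  Callers are expected to synchronize access externally (see the note at the top
  //  of this class).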

  // Update statistics. This walks all in-use chunks.
  void add_to_statistics(ArenaStats* out) const;

  // Convenience method to get the most important usage statistics.
  // For deeper analysis use add_to_statistics().
  void usage_numbers(size_t* p_used_words, size_t* p_committed_words, size_t* p_capacity_words) const;
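  // For example (all values are word counts):
  //
  //   size_t used = 0, committed = 0, capacity = 0;
  //   arena->usage_numbers(&used, &committed, &capacity);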

  DEBUG_ONLY(void verify() const;)
  DEBUG_ONLY(void verify_allocation_guards() const;)

  void print_on(outputStream* st) const;

};

} // namespace metaspace

#endif // SHARE_MEMORY_METASPACE_METASPACEARENA_HPP