1 /*
  2  * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #ifndef SHARE_CODE_CODECACHE_HPP
 26 #define SHARE_CODE_CODECACHE_HPP
 27 
 28 #include "code/codeBlob.hpp"
 29 #include "code/nmethod.hpp"
 30 #include "gc/shared/gcBehaviours.hpp"
 31 #include "memory/allocation.hpp"
 32 #include "memory/heap.hpp"
 33 #include "oops/instanceKlass.hpp"
 34 #include "oops/oopsHierarchy.hpp"
 35 #include "runtime/mutexLocker.hpp"
 36 #include "utilities/numberSeq.hpp"
 37 
 38 // The CodeCache implements the code cache for various pieces of generated
 39 // code, e.g., compiled java methods, runtime stubs, transition frames, etc.
 40 // The entries in the CodeCache are all CodeBlob's.
 41 
 42 // -- Implementation --
 43 // The CodeCache consists of one or more CodeHeaps, each of which contains
 44 // CodeBlobs of a specific CodeBlobType. Currently heaps for the following
 45 // types are available:
 46 //  - Non-nmethods: Non-nmethods like Buffers, Adapters and Runtime Stubs
 47 //  - Profiled nmethods: nmethods that are profiled, i.e., those
 48 //    executed at level 2 or 3
 49 //  - Non-Profiled nmethods: nmethods that are not profiled, i.e., those
 50 //    executed at level 1 or 4 and native methods
 51 //  - All: Used for code of all types if code cache segmentation is disabled.
 52 //
 53 // In the rare case of the non-nmethod code heap getting full, non-nmethod code
 54 // will be stored in the non-profiled code heap as a fallback solution.
 55 //
 56 // Depending on the availability of compilers and compilation mode there
 57 // may be fewer heaps. The size of the code heaps depends on the values of
 58 // ReservedCodeCacheSize, NonProfiledCodeHeapSize and ProfiledCodeHeapSize
 59 // (see CodeCache::heap_available(..) and CodeCache::initialize_heaps(..)
 60 // for details).
 61 //
 62 // Code cache segmentation is controlled by the flag SegmentedCodeCache.
 63 // If turned off, all code types are stored in a single code heap. By default
 64 // code cache segmentation is turned on if tiered mode is enabled and
 65 // ReservedCodeCacheSize >= 240 MB.
 66 //
 67 // All methods of the CodeCache accepting a CodeBlobType only apply to
 68 // CodeBlobs of the given type. For example, iteration over the
 69 // CodeBlobs of a specific type can be done by using CodeCache::first_blob(..)
 70 // and CodeCache::next_blob(..) and providing the corresponding CodeBlobType.
 71 //
 72 // IMPORTANT: If you add new CodeHeaps to the code cache or change the
 73 // existing ones, make sure to adapt the dtrace scripts (jhelper.d) for
 74 // Solaris and BSD.
 75 
 76 class ExceptionCache;
 77 class KlassDepChange;
 78 class OopClosure;
 79 class ShenandoahParallelCodeHeapIterator;
 80 class NativePostCallNop;
 81 class DeoptimizationScope;
 82 
 83 class CodeCache : AllStatic {
 84   friend class VMStructs;
 85   friend class JVMCIVMStructs;
 86   template <class T, class Filter, bool is_compiled_method> friend class CodeBlobIterator;
 87   friend class WhiteBox;
 88   friend class CodeCacheLoader;
 89   friend class ShenandoahParallelCodeHeapIterator;
 90  private:
 91   // CodeHeaps of the cache
 92   static GrowableArray<CodeHeap*>* _heaps;
 93   static GrowableArray<CodeHeap*>* _compiled_heaps;
 94   static GrowableArray<CodeHeap*>* _nmethod_heaps;
 95   static GrowableArray<CodeHeap*>* _allocable_heaps;
 96 
 97   static address _low_bound;                                 // Lower bound of CodeHeap addresses
 98   static address _high_bound;                                // Upper bound of CodeHeap addresses
 99   static volatile int _number_of_nmethods_with_dependencies; // Total number of nmethods with dependencies
100 
101   static uint8_t           _unloading_cycle;          // Global state for recognizing old nmethods that need to be unloaded
102   static uint64_t          _gc_epoch;                 // Global state for tracking when nmethods were found to be on-stack
103   static uint64_t          _cold_gc_count;            // Global state for determining how many GCs are needed before an nmethod is cold
104   static size_t            _last_unloading_used;
105   static double            _last_unloading_time;
106   static TruncatedSeq      _unloading_gc_intervals;
107   static TruncatedSeq      _unloading_allocation_rates;
108   static volatile bool     _unloading_threshold_gc_requested;
109 
110   static ExceptionCache* volatile _exception_cache_purge_list;
111 
112   // CodeHeap management
113   static void initialize_heaps();                             // Initializes the CodeHeaps
114   // Check the code heap sizes set by the user via command line
115   static void check_heap_sizes(size_t non_nmethod_size, size_t profiled_size, size_t non_profiled_size, size_t cache_size, bool all_set);
116   // Creates a new heap with the given name and size, containing CodeBlobs of the given type
117   static void add_heap(ReservedSpace rs, const char* name, CodeBlobType code_blob_type);
118   static CodeHeap* get_code_heap_containing(void* p);         // Returns the CodeHeap containing the given pointer, or nullptr
119   static CodeHeap* get_code_heap(const void* cb);             // Returns the CodeHeap for the given CodeBlob
120   static CodeHeap* get_code_heap(CodeBlobType code_blob_type);         // Returns the CodeHeap for the given CodeBlobType
121   // Returns the name of the VM option to set the size of the corresponding CodeHeap
122   static const char* get_code_heap_flag_name(CodeBlobType code_blob_type);
123   static ReservedCodeSpace reserve_heap_memory(size_t size, size_t rs_ps); // Reserves one continuous chunk of memory for the CodeHeaps
124 
125   // Iteration
126   static CodeBlob* first_blob(CodeHeap* heap);                // Returns the first CodeBlob on the given CodeHeap
127   static CodeBlob* first_blob(CodeBlobType code_blob_type);            // Returns the first CodeBlob of the given type
128   static CodeBlob* next_blob(CodeHeap* heap, CodeBlob* cb);   // Returns the next CodeBlob on the given CodeHeap
129 
130  private:
131   static size_t bytes_allocated_in_freelists();
132   static int    allocated_segments();
133   static size_t freelists_length();
134 
135   // Make private to prevent unsafe calls.  Not all CodeBlob*'s are embedded in a CodeHeap.
136   static bool contains(CodeBlob *p) { fatal("don't call me!"); return false; }
137 
138  public:
139   // Initialization
140   static void initialize();
141   static size_t page_size(bool aligned = true, size_t min_pages = 1); // Returns the page size used by the CodeCache
142 
143   static int code_heap_compare(CodeHeap* const &lhs, CodeHeap* const &rhs);
144 
145   static void add_heap(CodeHeap* heap);
146   static const GrowableArray<CodeHeap*>* heaps() { return _heaps; }
147   static const GrowableArray<CodeHeap*>* compiled_heaps() { return _compiled_heaps; }
148   static const GrowableArray<CodeHeap*>* nmethod_heaps() { return _nmethod_heaps; }
149 
150   static void* map_cached_code();
151   // Allocation/administration
152   static CodeBlob* allocate(uint size, CodeBlobType code_blob_type, bool handle_alloc_failure = true, CodeBlobType orig_code_blob_type = CodeBlobType::All); // allocates a new CodeBlob
153   static void commit(CodeBlob* cb);                        // called when the allocated CodeBlob has been filled
154   static void free(CodeBlob* cb);                          // frees a CodeBlob
155   static void free_unused_tail(CodeBlob* cb, size_t used); // frees the unused tail of a CodeBlob (only used by TemplateInterpreter::initialize())
156   static bool contains(void *p);                           // returns whether p is included
157   static bool contains(nmethod* nm);                       // returns whether nm is included
158   static void blobs_do(void f(CodeBlob* cb));              // iterates over all CodeBlobs
159   static void blobs_do(CodeBlobClosure* f);                // iterates over all CodeBlobs
160   static void nmethods_do(void f(nmethod* nm));            // iterates over all nmethods
161   static void metadata_do(MetadataClosure* f);             // iterates over metadata in alive nmethods
162 
163   // Lookup
164   static CodeBlob* find_blob(void* start);              // Returns the CodeBlob containing the given address
165   static CodeBlob* find_blob_fast(void* start);         // Returns the CodeBlob containing the given address
166   static CodeBlob* find_blob_and_oopmap(void* start, int& slot);         // Returns the CodeBlob containing the given address
167   static int find_oopmap_slot_fast(void* start);        // Returns a fast oopmap slot if there is any; -1 otherwise
168   static nmethod*  find_nmethod(void* start);           // Returns the nmethod containing the given address
169   static CompiledMethod* find_compiled(void* start);
170 
171   static int       blob_count();                        // Returns the total number of CodeBlobs in the cache
172   static int       blob_count(CodeBlobType code_blob_type);
173   static int       adapter_count();                     // Returns the total number of Adapters in the cache
174   static int       adapter_count(CodeBlobType code_blob_type);
175   static int       nmethod_count();                     // Returns the total number of nmethods in the cache
176   static int       nmethod_count(CodeBlobType code_blob_type);
177 
178   // GC support
179   static void verify_oops();
180 
181   // Helper scope object managing code cache unlinking behavior, i.e. sets and
182   // restores the closure that determines which nmethods are going to be removed
183   // during the unlinking part of code cache unloading.
184   class UnlinkingScope : StackObj {
185     ClosureIsUnloadingBehaviour _is_unloading_behaviour;
186     IsUnloadingBehaviour*       _saved_behaviour;
187 
188   public:
189     UnlinkingScope(BoolObjectClosure* is_alive);
190     ~UnlinkingScope();
191   };
192 
193   // Code cache unloading heuristics
194   static uint64_t cold_gc_count();
195   static void update_cold_gc_count();
196   static void gc_on_allocation();
197 
198   // The GC epoch and marking_cycle code below is there to support sweeping
199   // nmethods in loom stack chunks.
200   static uint64_t gc_epoch();
201   static bool is_gc_marking_cycle_active();
202   static uint64_t previous_completed_gc_marking_cycle();
203   static void on_gc_marking_cycle_start();
204   static void on_gc_marking_cycle_finish();
205   // Arm nmethods so that special actions are taken (nmethod_entry_barrier) for
206   // on-stack nmethods. It's used in two places:
207   // 1. Used before the start of concurrent marking so that oops inside
208   //    on-stack nmethods are visited.
209   // 2. Used at the end of (stw/concurrent) marking so that nmethod::_gc_epoch
210   //    is up-to-date, which provides more accurate estimate of
211   //    nmethod::is_cold.
212   static void arm_all_nmethods();
213 
214   static void maybe_restart_compiler(size_t freed_memory);
215   static void do_unloading(bool unloading_occurred);
216   static uint8_t unloading_cycle() { return _unloading_cycle; }
217 
218   static void increment_unloading_cycle();
219 
220   static void release_exception_cache(ExceptionCache* entry);
221   static void purge_exception_caches();
222 
223   // Printing/debugging
224   static void print();                           // prints summary
225   static void print_internals();
226   static void print_nmethods_on(outputStream* st);
227   static void print_memory_overhead();
228   static void verify();                          // verifies the code cache
229   static void print_trace(const char* event, CodeBlob* cb, uint size = 0) PRODUCT_RETURN;
230   static void print_summary(outputStream* st, bool detailed = true); // Prints a summary of the code cache usage
231   static void log_state(outputStream* st);
232   LINUX_ONLY(static void write_perf_map(const char* filename = nullptr);)
233   static const char* get_code_heap_name(CodeBlobType code_blob_type)  { return (heap_available(code_blob_type) ? get_code_heap(code_blob_type)->name() : "Unused"); }
234   static void report_codemem_full(CodeBlobType code_blob_type, bool print);
235 
236   static void print_nmethod_statistics_on(outputStream* st);
237 
238   // Dcmd (Diagnostic commands)
239   static void print_codelist(outputStream* st);
240   static void print_layout(outputStream* st);
241 
242   // The full limits of the codeCache
243   static address low_bound()                          { return _low_bound; }
244   static address low_bound(CodeBlobType code_blob_type);
245   static address high_bound()                         { return _high_bound; }
246   static address high_bound(CodeBlobType code_blob_type);
247 
248   // Profiling
249   static size_t capacity();
250   static size_t unallocated_capacity(CodeBlobType code_blob_type);
251   static size_t unallocated_capacity();
252   static size_t max_capacity();
253 
254   static double reverse_free_ratio();
255 
256   static size_t max_distance_to_non_nmethod();
257   static bool is_non_nmethod(address addr);
258 
259   static void clear_inline_caches();                  // clear all inline caches
260   static void cleanup_inline_caches_whitebox();       // clean bad nmethods from inline caches
261 
262   // Returns true if an own CodeHeap for the given CodeBlobType is available
263   static bool heap_available(CodeBlobType code_blob_type);
264 
265   // Returns the CodeBlobType for the given CompiledMethod
266   static CodeBlobType get_code_blob_type(CompiledMethod* cm) {
267     return get_code_heap(cm)->code_blob_type();
268   }
269 
270   static bool code_blob_type_accepts_compiled(CodeBlobType code_blob_type) {
271     bool result = code_blob_type == CodeBlobType::All || code_blob_type <= CodeBlobType::MethodProfiled;
272     return result;
273   }
274 
275   static bool code_blob_type_accepts_nmethod(CodeBlobType type) {
276     return type == CodeBlobType::All || type <= CodeBlobType::MethodProfiled;
277   }
278 
279   static bool code_blob_type_accepts_allocable(CodeBlobType type) {
280     return type <= CodeBlobType::All;
281   }
282 
283 
284   // Returns the CodeBlobType for the given compilation level
285   static CodeBlobType get_code_blob_type(int comp_level) {
286     if (comp_level == CompLevel_none ||
287         comp_level == CompLevel_simple ||
288         comp_level == CompLevel_full_optimization) {
289       // Non profiled methods
290       return CodeBlobType::MethodNonProfiled;
291     } else if (comp_level == CompLevel_limited_profile ||
292                comp_level == CompLevel_full_profile) {
293       // Profiled methods
294       return CodeBlobType::MethodProfiled;
295     }
296     ShouldNotReachHere();
297     return static_cast<CodeBlobType>(0);
298   }
299 
300   static void verify_clean_inline_caches();
301 
302   // Deoptimization
303  private:
304   static void mark_for_deoptimization(DeoptimizationScope* deopt_scope, KlassDepChange& changes);
305 
306  public:
307   static void mark_all_nmethods_for_deoptimization(DeoptimizationScope* deopt_scope);
308   static void mark_for_deoptimization(DeoptimizationScope* deopt_scope, Method* dependee);
309   static void make_marked_nmethods_deoptimized();
310 
311   // Marks dependents during classloading
312   static void mark_dependents_on(DeoptimizationScope* deopt_scope, InstanceKlass* dependee);
313 
314   // RedefineClasses support
315   // Marks in case of evolution
316   static void mark_dependents_for_evol_deoptimization(DeoptimizationScope* deopt_scope);
317   static void mark_all_nmethods_for_evol_deoptimization(DeoptimizationScope* deopt_scope);
318   static void old_nmethods_do(MetadataClosure* f) NOT_JVMTI_RETURN;
319   static void unregister_old_nmethod(CompiledMethod* c) NOT_JVMTI_RETURN;
320 
321   // Support for fullspeed debugging
322   static void mark_dependents_on_method_for_breakpoint(const methodHandle& dependee);
323 
324   // tells if there are nmethods with dependencies
325   static bool has_nmethods_with_dependencies();
326 
327   static int get_codemem_full_count(CodeBlobType code_blob_type) {
328     CodeHeap* heap = get_code_heap(code_blob_type);
329     return (heap != nullptr) ? heap->full_count() : 0;
330   }
331 
332   // CodeHeap State Analytics.
333   // interface methods for CodeHeap printing, called by CompileBroker
334   static void aggregate(outputStream *out, size_t granularity);
335   static void discard(outputStream *out);
336   static void print_usedSpace(outputStream *out);
337   static void print_freeSpace(outputStream *out);
338   static void print_count(outputStream *out);
339   static void print_space(outputStream *out);
340   static void print_age(outputStream *out);
341   static void print_names(outputStream *out);
342 };
343 
344 
345 // Iterator to iterate over code blobs in the CodeCache.
346 // The relaxed iterators only hold the CodeCache_lock across next calls
template <class T, class Filter, bool is_relaxed> class CodeBlobIterator : public StackObj {
 public:
  // Whether iteration should skip nmethods that are currently unloading.
  enum LivenessFilter { all_blobs, only_not_unloading };

 private:
  CodeBlob* _code_blob;   // Current CodeBlob
  GrowableArrayIterator<CodeHeap*> _heap;  // Current position in Filter::heaps()
  GrowableArrayIterator<CodeHeap*> _end;   // End of Filter::heaps()
  bool _only_not_unloading;                // True if unloading nmethods are skipped

  // NOTE(review): empty body and no callers visible in this file —
  // presumably a customization hook; confirm before removing.
  void initialize_iteration(T* nm) {
  }

  // Advance to the next blob that passes Filter and (optionally) the
  // is_unloading check, moving on to the next heap when the current one
  // is exhausted. Returns false when all heaps have been walked.
  bool next_impl() {
    for (;;) {
      // Walk through heaps as required
      if (!next_blob()) {
        if (_heap == _end) {
          return false;
        }
        ++_heap;
        continue;
      }

      // Filter is_unloading as required
      if (_only_not_unloading) {
        CompiledMethod* cm = _code_blob->as_compiled_method_or_null();
        if (cm != nullptr && cm->is_unloading()) {
          continue;
        }
      }

      return true;
    }
  }

 public:
  // Construct an iterator over Filter::heaps(). If nm is non-null, iteration
  // starts at that blob (its heap is located first); otherwise the first call
  // to next() positions the iterator at the first matching blob.
  CodeBlobIterator(LivenessFilter filter, T* nm = nullptr)
    : _only_not_unloading(filter == only_not_unloading)
  {
    if (Filter::heaps() == nullptr) {
      // The iterator is supposed to shortcut since we have
      // _heap == _end, but make sure we do not have garbage
      // in other fields as well.
      _code_blob = nullptr;
      return;
    }
    _heap = Filter::heaps()->begin();
    _end = Filter::heaps()->end();
    // If set to nullptr, initialized by first call to next()
    _code_blob = nm;
    if (nm != nullptr) {
      // NOTE(review): this scan is not bounded by _end; it relies on nm
      // residing in one of Filter::heaps() — the assert below only fires
      // after the loop already exited on a match.
      while(!(*_heap)->contains(_code_blob)) {
        ++_heap;
      }
      assert((*_heap)->contains(_code_blob), "match not found");
    }
  }

  // Advance iterator to next blob
  bool next() {
    if (is_relaxed) {
      // Relaxed iteration: hold CodeCache_lock only for this single step.
      MutexLocker ml(CodeCache_lock, Mutex::_no_safepoint_check_flag);
      return next_impl();
    } else {
      // Strict iteration: caller must hold the lock (or be at a safepoint)
      // for the whole traversal.
      assert_locked_or_safepoint(CodeCache_lock);
      return next_impl();
    }
  }

  bool end()  const { return _code_blob == nullptr; }  // True when iteration is exhausted
  T* method() const { return (T*)_code_blob; }         // Current blob, cast to the iterated type

private:

  // Advance iterator to the next blob in the current code heap
  bool next_blob() {
    if (_heap == _end) {
      return false;
    }
    CodeHeap *heap = *_heap;
    // Get first method CodeBlob
    if (_code_blob == nullptr) {
      _code_blob = CodeCache::first_blob(heap);
      if (_code_blob == nullptr) {
        return false;
      } else if (Filter::apply(_code_blob)) {
        return true;
      }
    }
    // Search for next method CodeBlob
    _code_blob = CodeCache::next_blob(heap, _code_blob);
    while (_code_blob != nullptr && !Filter::apply(_code_blob)) {
      _code_blob = CodeCache::next_blob(heap, _code_blob);
    }
    return _code_blob != nullptr;
  }
};
445 
446 struct CompiledMethodFilter {
447   static bool apply(CodeBlob* cb) { return cb->is_compiled(); }
448   static const GrowableArray<CodeHeap*>* heaps() { return CodeCache::compiled_heaps(); }
449 };
450 
451 
452 struct NMethodFilter {
453   static bool apply(CodeBlob* cb) { return cb->is_nmethod(); }
454   static const GrowableArray<CodeHeap*>* heaps() { return CodeCache::nmethod_heaps(); }
455 };
456 
457 struct AllCodeBlobsFilter {
458   static bool apply(CodeBlob* cb) { return true; }
459   static const GrowableArray<CodeHeap*>* heaps() { return CodeCache::heaps(); }
460 };
461 
462 typedef CodeBlobIterator<CompiledMethod, CompiledMethodFilter, false /* is_relaxed */> CompiledMethodIterator;
463 typedef CodeBlobIterator<CompiledMethod, CompiledMethodFilter, true /* is_relaxed */> RelaxedCompiledMethodIterator;
464 typedef CodeBlobIterator<nmethod, NMethodFilter, false /* is_relaxed */> NMethodIterator;
465 typedef CodeBlobIterator<CodeBlob, AllCodeBlobsFilter, false /* is_relaxed */> AllCodeBlobsIterator;
466 
467 #endif // SHARE_CODE_CODECACHE_HPP