1 /*
  2  * Copyright (c) 2016, 2025, Oracle and/or its affiliates. All rights reserved.
  3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  4  *
  5  * This code is free software; you can redistribute it and/or modify it
  6  * under the terms of the GNU General Public License version 2 only, as
  7  * published by the Free Software Foundation.
  8  *
  9  * This code is distributed in the hope that it will be useful, but WITHOUT
 10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 12  * version 2 for more details (a copy is included in the LICENSE file that
 13  * accompanied this code).
 14  *
 15  * You should have received a copy of the GNU General Public License version
 16  * 2 along with this work; if not, write to the Free Software Foundation,
 17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 18  *
 19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 20  * or visit www.oracle.com if you need additional information or have any
 21  * questions.
 22  *
 23  */
 24 
 25 #include "code/codeCache.hpp"
 26 #include "compiler/compilerDefinitions.inline.hpp"
 27 #include "interpreter/invocationCounter.hpp"
 28 #include "jvm_io.h"
 29 #include "runtime/arguments.hpp"
 30 #include "runtime/continuation.hpp"
 31 #include "runtime/flags/jvmFlag.hpp"
 32 #include "runtime/flags/jvmFlagAccess.hpp"
 33 #include "runtime/flags/jvmFlagConstraintsCompiler.hpp"
 34 #include "runtime/flags/jvmFlagLimit.hpp"
 35 #include "runtime/globals.hpp"
 36 #include "runtime/globals_extension.hpp"
 37 #include "utilities/defaultStream.hpp"
 38 
// Printable names for the compiler type enum values, indexed by compiler
// type. The first slot (no compiler) maps to the empty string.
const char* compilertype2name_tab[compiler_number_of_types] = {
  "",
  "c1",
  "c2",
  "jvmci"
};
 45 
// Effective compilation mode; established by CompilationModeFlag::initialize().
CompilationModeFlag::Mode CompilationModeFlag::_mode = CompilationModeFlag::Mode::NORMAL;
 47 
// Warn that a recognized CompilationMode value cannot be honored by this
// VM configuration (e.g. the required compiler is not present).
static void print_mode_unavailable(const char* mode_name, const char* reason) {
  warning("%s compilation mode unavailable because %s.", mode_name, reason);
}
 51 
// Parses the CompilationMode flag and establishes the effective mode.
// Returns false (after printing the accepted values) only for an
// unrecognized mode string; a recognized-but-unavailable mode merely
// warns and leaves the mode at NORMAL.
bool CompilationModeFlag::initialize() {
  _mode = Mode::NORMAL;
  // During parsing we want to be very careful not to use any methods of CompilerConfig that depend on
  // CompilationModeFlag.
  if (CompilationMode != nullptr) {
    if (strcmp(CompilationMode, "default") == 0 || strcmp(CompilationMode, "normal") == 0) {
      assert(_mode == Mode::NORMAL, "Precondition");
    } else if (strcmp(CompilationMode, "quick-only") == 0) {
      // quick-only needs C1.
      if (!CompilerConfig::has_c1()) {
        print_mode_unavailable("quick-only", "there is no c1 present");
      } else {
        _mode = Mode::QUICK_ONLY;
      }
    } else if (strcmp(CompilationMode, "high-only") == 0) {
      // high-only needs an optimizing compiler: C2 or a JVMCI compiler.
      if (!CompilerConfig::has_c2() && !CompilerConfig::is_jvmci_compiler()) {
        print_mode_unavailable("high-only", "there is no c2 or jvmci compiler present");
      } else {
        _mode = Mode::HIGH_ONLY;
      }
    } else if (strcmp(CompilationMode, "high-only-quick-internal") == 0) {
      // high-only-quick-internal needs both C1 and a JVMCI compiler.
      if (!CompilerConfig::has_c1() || !CompilerConfig::is_jvmci_compiler()) {
        print_mode_unavailable("high-only-quick-internal", "there is no c1 and jvmci compiler present");
      } else {
        _mode = Mode::HIGH_ONLY_QUICK_INTERNAL;
      }
    } else {
      // Unknown mode string: list the supported values and reject the flag.
      print_error();
      return false;
    }
  }

  // Now that the flag is parsed, we can use any methods of CompilerConfig.
  if (normal()) {
    // Narrow NORMAL down when only a subset of the compilers is enabled.
    if (CompilerConfig::is_c1_simple_only()) {
      _mode = Mode::QUICK_ONLY;
    } else if (CompilerConfig::is_c2_or_jvmci_compiler_only()) {
      _mode = Mode::HIGH_ONLY;
    } else if (CompilerConfig::is_jvmci_compiler_enabled() && CompilerConfig::is_c1_enabled() && !TieredCompilation) {
      warning("Disabling tiered compilation with non-native JVMCI compiler is not recommended, "
              "disabling intermediate compilation levels instead. ");
      _mode = Mode::HIGH_ONLY_QUICK_INTERNAL;
    }
  }
  return true;
}
 97 
 98 void CompilationModeFlag::print_error() {
 99   jio_fprintf(defaultStream::error_stream(), "Unsupported compilation mode '%s', available modes are:", CompilationMode);
100   bool comma = false;
101   if (CompilerConfig::has_c1()) {
102     jio_fprintf(defaultStream::error_stream(), "%s quick-only", comma ? "," : "");
103     comma = true;
104   }
105   if (CompilerConfig::has_c2() || CompilerConfig::has_jvmci()) {
106     jio_fprintf(defaultStream::error_stream(), "%s high-only", comma ? "," : "");
107     comma = true;
108   }
109   if (CompilerConfig::has_c1() && CompilerConfig::has_jvmci()) {
110     jio_fprintf(defaultStream::error_stream(), "%s high-only-quick-internal", comma ? "," : "");
111     comma = true;
112   }
113   jio_fprintf(defaultStream::error_stream(), "\n");
114 }
115 
// Convenience overload: returns threshold scaled with the global
// CompileThresholdScaling flag.
intx CompilerConfig::scaled_compile_threshold(intx threshold) {
  return scaled_compile_threshold(threshold, CompileThresholdScaling);
}
120 
// Convenience overload: returns freq_log scaled with the global
// CompileThresholdScaling flag.
intx CompilerConfig::scaled_freq_log(intx freq_log) {
  return scaled_freq_log(freq_log, CompileThresholdScaling);
}
125 
126 // For XXXThreshold flags, which all have a valid range of [0 .. max_jint]
127 intx CompilerConfig::jvmflag_scaled_compile_threshold(intx threshold) {
128   return MAX2((intx)0, MIN2(scaled_compile_threshold(threshold), (intx)max_jint));
129 }
130 
131 // For XXXNotifyFreqLog flags, which all have a valid range of [0 .. 30]
132 intx CompilerConfig::jvmflag_scaled_freq_log(intx freq_log) {
133   return MAX2((intx)0, MIN2(scaled_freq_log(freq_log), (intx)30));
134 }
135 
136 // Returns threshold scaled with the value of scale.
137 // If scale < 0.0, threshold is returned without scaling.
138 intx CompilerConfig::scaled_compile_threshold(intx threshold, double scale) {
139   assert(threshold >= 0, "must be");
140   if (scale == 1.0 || scale < 0.0) {
141     return threshold;
142   } else {
143     double v = threshold * scale;
144     assert(v >= 0, "must be");
145     if (g_isnan(v) || !g_isfinite(v)) {
146       return max_intx;
147     }
148     int exp;
149     (void) frexp(v, &exp);
150     int max_exp = sizeof(intx) * BitsPerByte - 1;
151     if (exp > max_exp) {
152       return max_intx;
153     }
154     intx r = (intx)(v);
155     assert(r >= 0, "must be");
156     return r;
157   }
158 }
159 
// Returns freq_log scaled with the value of scale.
// Returned values are in the range of [0, InvocationCounter::number_of_count_bits + 1].
// If scale < 0.0, freq_log is returned without scaling.
intx CompilerConfig::scaled_freq_log(intx freq_log, double scale) {
  // Check if scaling is necessary or if negative value was specified.
  if (scale == 1.0 || scale < 0.0) {
    return freq_log;
  }
  // Check values to avoid calculating log2 of 0.
  if (scale == 0.0 || freq_log == 0) {
    return 0;
  }
  // Determine the maximum notification frequency value currently supported.
  // The largest mask value that the interpreter/C1 can handle is
  // of length InvocationCounter::number_of_count_bits. Mask values are always
  // one bit shorter then the value of the notification frequency. Set
  // max_freq_bits accordingly.
  int max_freq_bits = InvocationCounter::number_of_count_bits + 1;
  // Scale in the linear domain (2^freq_log), then convert back to a log.
  intx scaled_freq = scaled_compile_threshold((intx)1 << freq_log, scale);

  if (scaled_freq == 0) {
    // Return 0 right away to avoid calculating log2 of 0.
    return 0;
  } else {
    // Cap at max_freq_bits so the interpreter/C1 mask stays representable.
    return MIN2(log2i(scaled_freq), max_freq_bits);
  }
}
187 
// Configures the VM to emulate the old client VM: C1-only compilation,
// no interpreter profiling, no JVMCI, and a much smaller code cache.
// Only user-untouched flags are adjusted (FLAG_IS_DEFAULT guards).
void CompilerConfig::set_client_emulation_mode_flags() {
  assert(has_c1(), "Must have C1 compiler present");
  CompilationModeFlag::set_quick_only();

  FLAG_SET_ERGO(ProfileInterpreter, false);
#if INCLUDE_JVMCI
  FLAG_SET_ERGO(EnableJVMCI, false);
  FLAG_SET_ERGO(UseJVMCICompiler, false);
#endif
  if (FLAG_IS_DEFAULT(NeverActAsServerClassMachine)) {
    FLAG_SET_ERGO(NeverActAsServerClassMachine, true);
  }
  if (FLAG_IS_DEFAULT(InitialCodeCacheSize)) {
    FLAG_SET_ERGO(InitialCodeCacheSize, 160*K);
  }
  if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) {
    FLAG_SET_ERGO(ReservedCodeCacheSize, 32*M);
  }
  // C1-only code goes into the non-profiled heap; profiled heap is unused.
  if (FLAG_IS_DEFAULT(NonProfiledCodeHeapSize)) {
    FLAG_SET_ERGO(NonProfiledCodeHeapSize, 27*M);
  }
  if (FLAG_IS_DEFAULT(ProfiledCodeHeapSize)) {
    FLAG_SET_ERGO(ProfiledCodeHeapSize, 0);
  }
  if (FLAG_IS_DEFAULT(NonNMethodCodeHeapSize)) {
    FLAG_SET_ERGO(NonNMethodCodeHeapSize, 5*M);
  }
  if (FLAG_IS_DEFAULT(CodeCacheExpansionSize)) {
    FLAG_SET_ERGO(CodeCacheExpansionSize, 32*K);
  }
  if (FLAG_IS_DEFAULT(MaxRAM)) {
    // Do not use FLAG_SET_ERGO to update MaxRAM, as this will impact
    // heap setting done based on available phys_mem (see Arguments::set_heap_size).
    FLAG_SET_DEFAULT(MaxRAM, 1ULL*G);
  }
  if (FLAG_IS_DEFAULT(CICompilerCount)) {
    FLAG_SET_ERGO(CICompilerCount, 1);
  }
}
227 
// Returns true if the user explicitly selected a compilation mode via
// any of the mode-controlling flags (including the JVMCI ones, when built in).
bool CompilerConfig::is_compilation_mode_selected() {
  return !FLAG_IS_DEFAULT(TieredCompilation) ||
         !FLAG_IS_DEFAULT(TieredStopAtLevel) ||
         !FLAG_IS_DEFAULT(CompilationMode)
         JVMCI_ONLY(|| !FLAG_IS_DEFAULT(EnableJVMCI)
                    || !FLAG_IS_DEFAULT(UseJVMCICompiler));
}
235 
236 static bool check_legacy_flags() {
237   JVMFlag* compile_threshold_flag = JVMFlag::flag_from_enum(FLAG_MEMBER_ENUM(CompileThreshold));
238   if (JVMFlagAccess::check_constraint(compile_threshold_flag, JVMFlagLimit::get_constraint(compile_threshold_flag)->constraint_func(), false) != JVMFlag::SUCCESS) {
239     return false;
240   }
241   JVMFlag* on_stack_replace_percentage_flag = JVMFlag::flag_from_enum(FLAG_MEMBER_ENUM(OnStackReplacePercentage));
242   if (JVMFlagAccess::check_constraint(on_stack_replace_percentage_flag, JVMFlagLimit::get_constraint(on_stack_replace_percentage_flag)->constraint_func(), false) != JVMFlag::SUCCESS) {
243     return false;
244   }
245   JVMFlag* interpreter_profile_percentage_flag = JVMFlag::flag_from_enum(FLAG_MEMBER_ENUM(InterpreterProfilePercentage));
246   if (JVMFlagAccess::check_range(interpreter_profile_percentage_flag, false) != JVMFlag::SUCCESS) {
247     return false;
248   }
249   return true;
250 }
251 
// Translates the legacy (pre-tiered) flags CompileThreshold,
// OnStackReplacePercentage and InterpreterProfilePercentage into the
// equivalent tiered-policy flags when running with a single compiler tier.
// In normal tiered mode the legacy flags are ignored. Finally, applies
// CompileThresholdScaling to CompileThreshold itself.
void CompilerConfig::set_legacy_emulation_flags() {
  // Any legacy flags set?
  if (!FLAG_IS_DEFAULT(CompileThreshold)         ||
      !FLAG_IS_DEFAULT(OnStackReplacePercentage) ||
      !FLAG_IS_DEFAULT(InterpreterProfilePercentage)) {
    if (CompilerConfig::is_c1_only() || CompilerConfig::is_c2_or_jvmci_compiler_only()) {
      // This function is called before these flags are validated. In order to not confuse the user with extraneous
      // error messages, we check the validity of these flags here and bail out if any of them are invalid.
      if (!check_legacy_flags()) {
        return;
      }
      // Note, we do not scale CompileThreshold before this because the tiered flags are
      // all going to be scaled further in set_compilation_policy_flags().
      const intx threshold = CompileThreshold;
      // Derived thresholds: the percentage flags are fractions of CompileThreshold.
      const intx profile_threshold = threshold * InterpreterProfilePercentage / 100;
      const intx osr_threshold = threshold * OnStackReplacePercentage / 100;
      const intx osr_profile_threshold = osr_threshold * InterpreterProfilePercentage / 100;

      // C1-only triggers compilation directly; otherwise profiling starts first.
      const intx threshold_log = log2i_graceful(CompilerConfig::is_c1_only() ? threshold : profile_threshold);
      const intx osr_threshold_log = log2i_graceful(CompilerConfig::is_c1_only() ? osr_threshold : osr_profile_threshold);

      if (Tier0InvokeNotifyFreqLog > threshold_log) {
        FLAG_SET_ERGO(Tier0InvokeNotifyFreqLog, MAX2<intx>(0, threshold_log));
      }

      // Note: Emulation oddity. The legacy policy limited the amount of callbacks from the
      // interpreter for backedge events to once every 1024 counter increments.
      // We simulate this behavior by limiting the backedge notification frequency to be
      // at least 2^10.
      if (Tier0BackedgeNotifyFreqLog > osr_threshold_log) {
        FLAG_SET_ERGO(Tier0BackedgeNotifyFreqLog, MAX2<intx>(10, osr_threshold_log));
      }
      // Adjust the tiered policy flags to approximate the legacy behavior.
      FLAG_SET_ERGO(Tier3InvocationThreshold, threshold);
      FLAG_SET_ERGO(Tier3MinInvocationThreshold, threshold);
      FLAG_SET_ERGO(Tier3CompileThreshold, threshold);
      FLAG_SET_ERGO(Tier3BackEdgeThreshold, osr_threshold);
      if (CompilerConfig::is_c2_or_jvmci_compiler_only()) {
        FLAG_SET_ERGO(Tier4InvocationThreshold, threshold);
        FLAG_SET_ERGO(Tier4MinInvocationThreshold, threshold);
        FLAG_SET_ERGO(Tier4CompileThreshold, threshold);
        FLAG_SET_ERGO(Tier4BackEdgeThreshold, osr_threshold);
        FLAG_SET_ERGO(Tier0ProfilingStartPercentage, InterpreterProfilePercentage);
      }
    } else {
      // Normal tiered mode, ignore legacy flags
    }
  }
  // Scale CompileThreshold
  // CompileThresholdScaling == 0.0 is equivalent to -Xint and leaves CompileThreshold unchanged.
  if (!FLAG_IS_DEFAULT(CompileThresholdScaling) && CompileThresholdScaling > 0.0 && CompileThreshold > 0) {
    intx scaled_value = scaled_compile_threshold(CompileThreshold);
    // Only apply the scaled value if it still satisfies the flag's constraint.
    if (CompileThresholdConstraintFunc(scaled_value, true) != JVMFlag::VIOLATES_CONSTRAINT) {
      FLAG_SET_ERGO(CompileThreshold, scaled_value);
    }
  }
}
309 
310 
// Establishes the tiered compilation policy flags: code cache sizing for
// tiered mode, -Xcomp aggressiveness, thresholds for the
// disable-intermediate mode, CompileThresholdScaling of all tier
// thresholds, and a few compiler-specific inlining tweaks.
void CompilerConfig::set_compilation_policy_flags() {
  if (is_tiered()) {
    // Increase the code cache size - tiered compiles a lot more.
    if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) {
      FLAG_SET_ERGO(ReservedCodeCacheSize,
                    MIN2(CODE_CACHE_DEFAULT_LIMIT, (size_t)ReservedCodeCacheSize * 5));
    }
    // Enable SegmentedCodeCache if tiered compilation is enabled, ReservedCodeCacheSize >= 240M
    // and the code cache contains at least 8 pages (segmentation disables advantage of huge pages).
    if (FLAG_IS_DEFAULT(SegmentedCodeCache) && ReservedCodeCacheSize >= 240*M &&
        8 * CodeCache::page_size() <= ReservedCodeCacheSize) {
      FLAG_SET_ERGO(SegmentedCodeCache, true);
    }
    if (Arguments::is_compiler_only()) { // -Xcomp
      // Be much more aggressive in tiered mode with -Xcomp and exercise C2 more.
      // We will first compile a level 3 version (C1 with full profiling), then do one invocation of it and
      // compile a level 4 (C2) and then continue executing it.
      if (FLAG_IS_DEFAULT(Tier3InvokeNotifyFreqLog)) {
        FLAG_SET_CMDLINE(Tier3InvokeNotifyFreqLog, 0);
      }
      if (FLAG_IS_DEFAULT(Tier4InvocationThreshold)) {
        FLAG_SET_CMDLINE(Tier4InvocationThreshold, 0);
      }
    }
  }

  if (CompileThresholdScaling < 0) {
    vm_exit_during_initialization("Negative value specified for CompileThresholdScaling", nullptr);
  }

  if (CompilationModeFlag::disable_intermediate()) {
    // With intermediate levels disabled, provide defaults tuned for a
    // two-level (interpreter/profiling + top tier) configuration.
    if (FLAG_IS_DEFAULT(Tier0ProfilingStartPercentage)) {
      FLAG_SET_DEFAULT(Tier0ProfilingStartPercentage, 33);
    }

    if (FLAG_IS_DEFAULT(Tier4InvocationThreshold)) {
      FLAG_SET_DEFAULT(Tier4InvocationThreshold, 5000);
    }
    if (FLAG_IS_DEFAULT(Tier4MinInvocationThreshold)) {
      FLAG_SET_DEFAULT(Tier4MinInvocationThreshold, 600);
    }
    if (FLAG_IS_DEFAULT(Tier4CompileThreshold)) {
      FLAG_SET_DEFAULT(Tier4CompileThreshold, 10000);
    }
    if (FLAG_IS_DEFAULT(Tier4BackEdgeThreshold)) {
      FLAG_SET_DEFAULT(Tier4BackEdgeThreshold, 15000);
    }

    // Mirror the tier-4 thresholds into the tier-3 flags so both tiers
    // trigger at the same points when intermediate levels are disabled.
    if (FLAG_IS_DEFAULT(Tier3InvocationThreshold)) {
      FLAG_SET_DEFAULT(Tier3InvocationThreshold, Tier4InvocationThreshold);
    }
    if (FLAG_IS_DEFAULT(Tier3MinInvocationThreshold)) {
      FLAG_SET_DEFAULT(Tier3MinInvocationThreshold, Tier4MinInvocationThreshold);
    }
    if (FLAG_IS_DEFAULT(Tier3CompileThreshold)) {
      FLAG_SET_DEFAULT(Tier3CompileThreshold, Tier4CompileThreshold);
    }
    if (FLAG_IS_DEFAULT(Tier3BackEdgeThreshold)) {
      FLAG_SET_DEFAULT(Tier3BackEdgeThreshold, Tier4BackEdgeThreshold);
    }

  }

  // Scale tiered compilation thresholds.
  // CompileThresholdScaling == 0.0 is equivalent to -Xint and leaves compilation thresholds unchanged.
  if (!FLAG_IS_DEFAULT(CompileThresholdScaling) && CompileThresholdScaling > 0.0) {
    FLAG_SET_ERGO(Tier0InvokeNotifyFreqLog, jvmflag_scaled_freq_log(Tier0InvokeNotifyFreqLog));
    FLAG_SET_ERGO(Tier0BackedgeNotifyFreqLog, jvmflag_scaled_freq_log(Tier0BackedgeNotifyFreqLog));

    FLAG_SET_ERGO(Tier3InvocationThreshold, jvmflag_scaled_compile_threshold(Tier3InvocationThreshold));
    FLAG_SET_ERGO(Tier3MinInvocationThreshold, jvmflag_scaled_compile_threshold(Tier3MinInvocationThreshold));
    FLAG_SET_ERGO(Tier3CompileThreshold, jvmflag_scaled_compile_threshold(Tier3CompileThreshold));
    FLAG_SET_ERGO(Tier3BackEdgeThreshold, jvmflag_scaled_compile_threshold(Tier3BackEdgeThreshold));

    // Tier2{Invocation,MinInvocation,Compile,Backedge}Threshold should be scaled here
    // once these thresholds become supported.

    FLAG_SET_ERGO(Tier2InvokeNotifyFreqLog, jvmflag_scaled_freq_log(Tier2InvokeNotifyFreqLog));
    FLAG_SET_ERGO(Tier2BackedgeNotifyFreqLog, jvmflag_scaled_freq_log(Tier2BackedgeNotifyFreqLog));

    FLAG_SET_ERGO(Tier3InvokeNotifyFreqLog, jvmflag_scaled_freq_log(Tier3InvokeNotifyFreqLog));
    FLAG_SET_ERGO(Tier3BackedgeNotifyFreqLog, jvmflag_scaled_freq_log(Tier3BackedgeNotifyFreqLog));

    FLAG_SET_ERGO(Tier23InlineeNotifyFreqLog, jvmflag_scaled_freq_log(Tier23InlineeNotifyFreqLog));

    FLAG_SET_ERGO(Tier4InvocationThreshold, jvmflag_scaled_compile_threshold(Tier4InvocationThreshold));
    FLAG_SET_ERGO(Tier4MinInvocationThreshold, jvmflag_scaled_compile_threshold(Tier4MinInvocationThreshold));
    FLAG_SET_ERGO(Tier4CompileThreshold, jvmflag_scaled_compile_threshold(Tier4CompileThreshold));
    FLAG_SET_ERGO(Tier4BackEdgeThreshold, jvmflag_scaled_compile_threshold(Tier4BackEdgeThreshold));
  }

#ifdef COMPILER1
  // Reduce stack usage due to inlining of methods which require much stack.
  // (High tier compiler can inline better based on profiling information.)
  if (FLAG_IS_DEFAULT(C1InlineStackLimit) &&
      TieredStopAtLevel == CompLevel_full_optimization && !CompilerConfig::is_c1_only()) {
    FLAG_SET_DEFAULT(C1InlineStackLimit, 5);
  }
#endif

  if (CompilerConfig::is_tiered() && CompilerConfig::is_c2_enabled()) {
#ifdef COMPILER2
    // Some inlining tuning
#if defined(X86) || defined(AARCH64) || defined(RISCV64)
    if (FLAG_IS_DEFAULT(InlineSmallCode)) {
      FLAG_SET_DEFAULT(InlineSmallCode, 2500);
    }
#endif
#endif // COMPILER2
  }

}
423 
424 #if INCLUDE_JVMCI
// Adjusts defaults needed when a JVMCI compiler is in use: wider type
// profiles, and either extra compiler-thread stack (libgraal/SVM) or
// larger code cache / delayed tier-2 fallback (jargraal, i.e. no native
// library).
void CompilerConfig::set_jvmci_specific_flags() {
  if (UseJVMCICompiler) {
    if (FLAG_IS_DEFAULT(TypeProfileWidth)) {
      FLAG_SET_DEFAULT(TypeProfileWidth, 8);
    }
    if (FLAG_IS_DEFAULT(TypeProfileLevel)) {
      FLAG_SET_DEFAULT(TypeProfileLevel, 0);
    }

    if (UseJVMCINativeLibrary) {
      // SVM compiled code requires more stack space
      if (FLAG_IS_DEFAULT(CompilerThreadStackSize)) {
        // Duplicate logic in the implementations of os::create_thread
        // so that we can then double the computed stack size. Once
        // the stack size requirements of SVM are better understood,
        // this logic can be pushed down into os::create_thread.
        int stack_size = CompilerThreadStackSize;
        if (stack_size == 0) {
          // 0 means "use the VM thread default"; mirror that here.
          stack_size = VMThreadStackSize;
        }
        if (stack_size != 0) {
          FLAG_SET_DEFAULT(CompilerThreadStackSize, stack_size * 2);
        }
      }
    } else {
      // JVMCI needs values not less than defaults
      if (FLAG_IS_DEFAULT(ReservedCodeCacheSize)) {
        FLAG_SET_DEFAULT(ReservedCodeCacheSize, MAX2(64*M, ReservedCodeCacheSize));
      }
      if (FLAG_IS_DEFAULT(InitialCodeCacheSize)) {
        FLAG_SET_DEFAULT(InitialCodeCacheSize, MAX2(16*M, InitialCodeCacheSize));
      }
      if (FLAG_IS_DEFAULT(NewSizeThreadIncrease)) {
        FLAG_SET_DEFAULT(NewSizeThreadIncrease, MAX2(4*K, NewSizeThreadIncrease));
      }
      if (FLAG_IS_DEFAULT(Tier3DelayOn)) {
        // This effectively prevents the compile broker scheduling tier 2
        // (i.e., limited C1 profiling) compilations instead of tier 3
        // (i.e., full C1 profiling) compilations when the tier 4 queue
        // backs up (which is quite likely when using a non-AOT compiled JVMCI
        // compiler). The observation based on jargraal is that the downside
        // of skipping full profiling is much worse for performance than the
        // queue backing up.
        FLAG_SET_DEFAULT(Tier3DelayOn, 100000);
      }
    } // !UseJVMCINativeLibrary
  } // UseJVMCICompiler
}
473 #endif // INCLUDE_JVMCI
474 
// Validates compiler/code-cache related flags, warning about or fixing up
// inconsistent combinations (-Xint, ReplayCompiles, JVMCI). Returns the
// incoming status ANDed with the outcome of the checks here.
bool CompilerConfig::check_args_consistency(bool status) {
  // Check lower bounds of the code cache
  // Template Interpreter code is approximately 3X larger in debug builds.
  uint min_code_cache_size = CodeCacheMinimumUseSpace DEBUG_ONLY(* 3);
  if (ReservedCodeCacheSize < InitialCodeCacheSize) {
    jio_fprintf(defaultStream::error_stream(),
                "Invalid ReservedCodeCacheSize: %dK. Must be at least InitialCodeCacheSize=%dK.\n",
                ReservedCodeCacheSize/K, InitialCodeCacheSize/K);
    status = false;
  } else if (ReservedCodeCacheSize < min_code_cache_size) {
    jio_fprintf(defaultStream::error_stream(),
                "Invalid ReservedCodeCacheSize=%dK. Must be at least %uK.\n", ReservedCodeCacheSize/K,
                min_code_cache_size/K);
    status = false;
  } else if (ReservedCodeCacheSize > CODE_CACHE_SIZE_LIMIT) {
    // Code cache size larger than CODE_CACHE_SIZE_LIMIT is not supported.
    jio_fprintf(defaultStream::error_stream(),
                "Invalid ReservedCodeCacheSize=%dM. Must be at most %uM.\n", ReservedCodeCacheSize/M,
                CODE_CACHE_SIZE_LIMIT/M);
    status = false;
  } else if (NonNMethodCodeHeapSize < min_code_cache_size) {
    jio_fprintf(defaultStream::error_stream(),
                "Invalid NonNMethodCodeHeapSize=%dK. Must be at least %uK.\n", NonNMethodCodeHeapSize/K,
                min_code_cache_size/K);
    status = false;
  }

#ifdef _LP64
  if (!FLAG_IS_DEFAULT(CICompilerCount) && !FLAG_IS_DEFAULT(CICompilerCountPerCPU) && CICompilerCountPerCPU) {
    warning("The VM option CICompilerCountPerCPU overrides CICompilerCount.");
  }
#endif

  // Replaying compiles requires foreground compilation.
  if (BackgroundCompilation && ReplayCompiles) {
    if (!FLAG_IS_DEFAULT(BackgroundCompilation)) {
      warning("BackgroundCompilation disabled due to ReplayCompiles option.");
    }
    FLAG_SET_CMDLINE(BackgroundCompilation, false);
  }

  if (CompilerConfig::is_interpreter_only()) {
    // -Xint: force off every compiler-related feature, warning only when
    // the user explicitly asked for the conflicting flag.
    if (UseCompiler) {
      if (!FLAG_IS_DEFAULT(UseCompiler)) {
        warning("UseCompiler disabled due to -Xint.");
      }
      FLAG_SET_CMDLINE(UseCompiler, false);
    }
    if (ProfileInterpreter) {
      if (!FLAG_IS_DEFAULT(ProfileInterpreter)) {
        warning("ProfileInterpreter disabled due to -Xint.");
      }
      FLAG_SET_CMDLINE(ProfileInterpreter, false);
    }
    if (TieredCompilation) {
      if (!FLAG_IS_DEFAULT(TieredCompilation)) {
        warning("TieredCompilation disabled due to -Xint.");
      }
      FLAG_SET_CMDLINE(TieredCompilation, false);
    }
    if (SegmentedCodeCache) {
      warning("SegmentedCodeCache has no meaningful effect with -Xint");
      FLAG_SET_DEFAULT(SegmentedCodeCache, false);
    }
#if INCLUDE_JVMCI
    if (EnableJVMCI || UseJVMCICompiler) {
      if (!FLAG_IS_DEFAULT(EnableJVMCI) || !FLAG_IS_DEFAULT(UseJVMCICompiler)) {
        warning("JVMCI Compiler disabled due to -Xint.");
      }
      FLAG_SET_CMDLINE(EnableJVMCI, false);
      FLAG_SET_CMDLINE(UseJVMCICompiler, false);
    }
#endif
  } else {
#if INCLUDE_JVMCI
    status = status && JVMCIGlobals::check_jvmci_flags_are_consistent();
#endif
  }

  return status;
}
555 
// Ergonomic entry point: selects the effective compilation mode and then
// applies the client-emulation, legacy-emulation, policy, JVMCI and
// C2-specific flag adjustments in dependency order.
void CompilerConfig::ergo_initialize() {
#if !COMPILER1_OR_COMPILER2
  // Interpreter-only build: no compiler flags to configure.
  return;
#endif

  if (has_c1()) {
    if (!is_compilation_mode_selected()) {
      // No explicit mode chosen; fall back to client emulation on machines
      // that should not act as server-class machines.
      if (NeverActAsServerClassMachine) {
        set_client_emulation_mode_flags();
      }
    } else if (!has_c2() && !is_jvmci_compiler()) {
      // Only C1 is available, so client emulation is the only option.
      set_client_emulation_mode_flags();
    }
  }

  set_legacy_emulation_flags();
  set_compilation_policy_flags();

#if INCLUDE_JVMCI
  // Check that JVMCI supports selected GC.
  // Should be done after GCConfig::initialize() was called.
  JVMCIGlobals::check_jvmci_supported_gc();

  // Do JVMCI specific settings
  set_jvmci_specific_flags();
#endif

  if (UseOnStackReplacement && !UseLoopCounter) {
    warning("On-stack-replacement requires loop counters; enabling loop counters");
    FLAG_SET_DEFAULT(UseLoopCounter, true);
  }

  if (ProfileInterpreter && CompilerConfig::is_c1_simple_only()) {
    if (!FLAG_IS_DEFAULT(ProfileInterpreter)) {
        warning("ProfileInterpreter disabled due to client emulation mode");
    }
    FLAG_SET_CMDLINE(ProfileInterpreter, false);
  }

#ifdef COMPILER2
  // Nested lock elimination builds on plain lock elimination.
  if (!EliminateLocks) {
    EliminateNestedLocks = false;
  }
  // All incremental-inlining variants depend on inlining being on at all.
  if (!Inline || !IncrementalInline) {
    IncrementalInline = false;
    IncrementalInlineMH = false;
    IncrementalInlineVirtual = false;
    StressIncrementalInlining = false;
  }
#ifndef PRODUCT
  if (!IncrementalInline) {
    AlwaysIncrementalInline = false;
  }
  if (FLAG_IS_CMDLINE(PrintIdealGraph) && !PrintIdealGraph) {
    FLAG_SET_ERGO(PrintIdealGraphLevel, -1);
  }
#endif
  if (!UseTypeSpeculation && FLAG_IS_DEFAULT(TypeProfileLevel)) {
    // nothing to use the profiling, turn if off
    FLAG_SET_DEFAULT(TypeProfileLevel, 0);
  }
  if (!FLAG_IS_DEFAULT(OptoLoopAlignment) && FLAG_IS_DEFAULT(MaxLoopPad)) {
    FLAG_SET_DEFAULT(MaxLoopPad, OptoLoopAlignment-1);
  }
  if (FLAG_IS_DEFAULT(LoopStripMiningIterShortLoop)) {
    // blind guess
    LoopStripMiningIterShortLoop = LoopStripMiningIter / 10;
  }
#endif // COMPILER2
}