/*
 * Copyright (c) 2000, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CALLGENERATOR_HPP
#define SHARE_OPTO_CALLGENERATOR_HPP

#include "compiler/compileBroker.hpp"
#include "opto/callnode.hpp"
#include "opto/compile.hpp"
#include "opto/type.hpp"
#include "runtime/deoptimization.hpp"

//---------------------------CallGenerator-------------------------------------
// The subclasses of this class handle generation of ideal nodes for
// call sites and method entry points.

class CallGenerator : public ArenaObj {
 private:
  ciMethod* _method;                    // The method being called.

 protected:
  CallGenerator(ciMethod* method) : _method(method) {}

  void do_late_inline_helper();

  virtual bool do_late_inline_check(Compile* C, JVMState* jvms) { ShouldNotReachHere(); return false; }
  virtual CallGenerator* inline_cg() const { ShouldNotReachHere(); return nullptr; }
  virtual bool is_pure_call() const { ShouldNotReachHere(); return false; }

 public:
  // Accessors
  ciMethod* method() const { return _method; }

  // is_inline: At least some code implementing the method is copied here.
  virtual bool is_inline() const { return false; }
  // is_intrinsic: There's a method-specific way of generating the inline code.
  virtual bool is_intrinsic() const { return false; }
  // is_parse: Bytecodes implementing the specific method are copied here.
  virtual bool is_parse() const { return false; }
  // is_virtual: The call uses the receiver type to select or check the method.
  virtual bool is_virtual() const { return false; }
  // is_deferred: The decision whether to inline or not is deferred.
  virtual bool is_deferred() const { return false; }
  // is_predicated: Uses an explicit check (predicate).
  virtual bool is_predicated() const { return false; }
  virtual int predicates_count() const { return 0; }
  // is_trap: Does not return to the caller.  (E.g., uncommon trap.)
  virtual bool is_trap() const { return false; }
  // does_virtual_dispatch: Should try inlining as normal method first.
  virtual bool does_virtual_dispatch() const { return false; }

  // is_late_inline: supports conversion of call into an inline
  virtual bool is_late_inline() const { return false; }
  // same but for method handle calls
  virtual bool is_mh_late_inline() const { return false; }
  virtual bool is_string_late_inline() const { return false; }
  virtual bool is_virtual_late_inline() const { return false; }

  // Replace the call with an inline version of the code
  virtual void do_late_inline() { ShouldNotReachHere(); }

  virtual CallNode* call_node() const { return nullptr; }
  virtual CallGenerator* with_call_node(CallNode* call) { return this; }

  virtual void set_unique_id(jlong id) { fatal("unique id only for late inlines"); };
  virtual jlong unique_id() const { fatal("unique id only for late inlines"); return 0; };

  virtual void set_callee_method(ciMethod* callee) { ShouldNotReachHere(); }

  // Note: It is possible for a CG to be both inline and virtual.
  // (The hashCode intrinsic does a vtable check and an inlined fast path.)

  // Allocate CallGenerators only in Compile arena since some of them are referenced from CallNodes.
  void* operator new(size_t size) throw() {
    Compile* C = Compile::current();
    return ArenaObj::operator new(size, C->comp_arena());
  }

  // Utilities:
  const TypeFunc* tf() const;

  // The given jvms has state and arguments for a call to my method.
  // Edges after jvms->argoff() carry all (pre-popped) argument values.
  //
  // Update the map with state and return values (if any) and return it.
  // The return values (0, 1, or 2) must be pushed on the map's stack,
  // and the sp of the jvms incremented accordingly.
  //
  // The jvms is returned on success.  Alternatively, a copy of the
  // given jvms, suitably updated, may be returned, in which case the
  // caller should discard the original jvms.
  //
  // The non-Parm edges of the returned map will contain updated global state,
  // and one or two edges before jvms->sp() will carry any return values.
  // Other map edges may contain locals or monitors, and should not
  // be changed in meaning.
  //
  // If the call traps, the returned map must have a control edge of top.
  // If the call can throw, the returned map must report has_exceptions().
  //
  // If the result is null, it means that this CallGenerator was unable
  // to handle the given call, and another CallGenerator should be consulted.
  virtual JVMState* generate(JVMState* jvms) = 0;
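
  // Illustrative only (not from the original header): a sketch of the
  // caller-side protocol implied by the contract above.  The local names
  // below are hypothetical; the real caller (e.g. Parse::do_call in
  // doCall.cpp) handles more cases.
  //
  //   JVMState* new_jvms = cg->generate(jvms);
  //   if (new_jvms == nullptr) {
  //     // This generator could not expand the call; consult another one,
  //     // e.g. fall back to a plain out-of-line call.
  //     cg       = CallGenerator::for_direct_call(cg->method());
  //     new_jvms = cg->generate(jvms);
  //   }
  //   // On success, new_jvms (possibly a copy of the original jvms) carries
  //   // the updated map: any return value has been pushed on its stack, and
  //   // exception states, if present, are reported via has_exceptions().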

  // How to generate a call site that is inlined:
  static CallGenerator* for_inline(ciMethod* m, float expected_uses = -1);
  // How to generate code for an on-stack replacement handler.
  static CallGenerator* for_osr(ciMethod* m, int osr_bci);

  // How to generate vanilla out-of-line call sites:
  static CallGenerator* for_direct_call(ciMethod* m, bool separate_io_projs = false);   // static, special
  static CallGenerator* for_virtual_call(ciMethod* m, int vtable_index);                // virtual, interface

  static CallGenerator* for_method_handle_call(  JVMState* jvms, ciMethod* caller, ciMethod* callee, bool allow_inline);
  static CallGenerator* for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool allow_inline, bool& input_not_const);

  // How to replace a direct call with an inline version
  static CallGenerator* for_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_mh_late_inline(ciMethod* caller, ciMethod* callee, bool input_not_const);
  static CallGenerator* for_string_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_boxing_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_vector_reboxing_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_late_inline_virtual(ciMethod* m, int vtable_index, float expected_uses);

  // How to make a call that optimistically assumes a receiver type:
  static CallGenerator* for_predicted_call(ciKlass* predicted_receiver,
                                           CallGenerator* if_missed,
                                           CallGenerator* if_hit,
                                           float hit_prob);

  static CallGenerator* for_guarded_call(ciKlass* predicted_receiver,
                                         CallGenerator* if_missed,
                                         CallGenerator* if_hit);

  // How to make a call that optimistically assumes a MethodHandle target:
  static CallGenerator* for_predicted_dynamic_call(ciMethodHandle* predicted_method_handle,
                                                   CallGenerator* if_missed,
                                                   CallGenerator* if_hit,
                                                   float hit_prob);

  // How to make a call that gives up and goes back to the interpreter:
  static CallGenerator* for_uncommon_trap(ciMethod* m,
                                          Deoptimization::DeoptReason reason,
                                          Deoptimization::DeoptAction action);

  // Registry for intrinsics:
  static CallGenerator* for_intrinsic(ciMethod* m);
  static void register_intrinsic(ciMethod* m, CallGenerator* cg);
  static CallGenerator* for_predicated_intrinsic(CallGenerator* intrinsic,
                                                 CallGenerator* cg);
  virtual Node* generate_predicate(JVMState* jvms, int predicate) { return nullptr; };

  virtual void print_inlining_late(InliningResult result, const char* msg) { ShouldNotReachHere(); }

  static void print_inlining(Compile* C, ciMethod* callee, int inline_level, int bci, const char* msg) {
    print_inlining_impl(C, callee, inline_level, bci, InliningResult::SUCCESS, msg);
  }

  static void print_inlining_failure(Compile* C, ciMethod* callee, int inline_level, int bci, const char* msg) {
    print_inlining_impl(C, callee, inline_level, bci, InliningResult::FAILURE, msg);
    C->log_inline_failure(msg);
  }

  static bool is_inlined_method_handle_intrinsic(JVMState* jvms, ciMethod* m);
  static bool is_inlined_method_handle_intrinsic(ciMethod* caller, int bci, ciMethod* m);
  static bool is_inlined_method_handle_intrinsic(ciMethod* symbolic_info, ciMethod* m);

 private:
  static void print_inlining_impl(Compile* C, ciMethod* callee, int inline_level, int bci,
                                  InliningResult result, const char* msg) {
    if (C->print_inlining()) {
      C->print_inlining(callee, inline_level, bci, result, msg);
    }
  }
};
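
// Illustrative only (not from the original header): how the factories above
// are commonly composed for a profile-predicted receiver type.  The names
// profiled_klass, target, vtable_index and prob are hypothetical placeholders.
//
//   CallGenerator* hit_cg  = CallGenerator::for_inline(target);
//   CallGenerator* miss_cg = CallGenerator::for_virtual_call(target, vtable_index);
//   CallGenerator* cg      = CallGenerator::for_predicted_call(profiled_klass,
//                                                              miss_cg, hit_cg, prob);
//   // cg->generate(jvms) then emits a receiver-class check: the hit path runs
//   // the inlined body, the miss path falls back to a virtual dispatch.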

//------------------------InlineCallGenerator----------------------------------
class InlineCallGenerator : public CallGenerator {
 protected:
  InlineCallGenerator(ciMethod* method) : CallGenerator(method) {}

 public:
  virtual bool is_inline() const { return true; }
};

#endif // SHARE_OPTO_CALLGENERATOR_HPP