/*
 * Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_LIBRARY_CALL_HPP
#define SHARE_OPTO_LIBRARY_CALL_HPP

#include "ci/ciMethod.hpp"
#include "classfile/javaClasses.hpp"
#include "opto/callGenerator.hpp"
#include "opto/castnode.hpp"
#include "opto/convertnode.hpp"
#include "opto/graphKit.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/movenode.hpp"

class LibraryIntrinsic : public InlineCallGenerator {
  // Extend the set of intrinsics known to the runtime:
 private:
  bool             _is_virtual;
  bool             _does_virtual_dispatch;
  int8_t           _predicates_count;  // Intrinsic is predicated by several conditions
  int8_t           _last_predicate;    // Last generated predicate
  vmIntrinsics::ID _intrinsic_id;

 public:
  LibraryIntrinsic(ciMethod* m, bool is_virtual, int predicates_count, bool does_virtual_dispatch, vmIntrinsics::ID id)
    : InlineCallGenerator(m),
      _is_virtual(is_virtual),
      _does_virtual_dispatch(does_virtual_dispatch),
      _predicates_count((int8_t)predicates_count),
      _last_predicate((int8_t)-1),
      _intrinsic_id(id)
  {
  }
  virtual bool is_intrinsic() const { return true; }
  virtual bool is_virtual() const { return _is_virtual; }
  virtual bool is_predicated() const { return _predicates_count > 0; }
  virtual int predicates_count() const { return _predicates_count; }
  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
  virtual JVMState* generate(JVMState* jvms);
  virtual Node* generate_predicate(JVMState* jvms, int predicate);
  vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
};


// Local helper class for LibraryIntrinsic:
class LibraryCallKit : public GraphKit {
 private:
  LibraryIntrinsic* _intrinsic;     // the library intrinsic being called
  Node*             _result;        // the result node, if any
  int               _reexecute_sp;  // the stack pointer when bytecode needs to be reexecuted

  const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr* adr_type);

 public:
  LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
    : GraphKit(jvms),
      _intrinsic(intrinsic),
      _result(nullptr)
  {
    // Check if this is a root compile. In that case we don't have a caller.
    if (!jvms->has_method()) {
      _reexecute_sp = sp();
    } else {
      // Find out how many arguments the interpreter needs when deoptimizing
      // and save the stack pointer value so it can be used by uncommon_trap.
      // We find the argument count by looking at the declared signature.
      bool ignored_will_link;
      ciSignature* declared_signature = nullptr;
      ciMethod* ignored_callee = caller()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
      const int nargs = declared_signature->arg_size_for_bc(caller()->java_code_at_bci(bci()));
      _reexecute_sp = sp() + nargs;  // "push" arguments back on stack
    }
  }

  virtual LibraryCallKit* is_LibraryCallKit() const { return (LibraryCallKit*)this; }

  ciMethod*         caller()    const    { return jvms()->method(); }
  int               bci()       const    { return jvms()->bci(); }
  LibraryIntrinsic* intrinsic() const    { return _intrinsic; }
  vmIntrinsics::ID  intrinsic_id() const { return _intrinsic->intrinsic_id(); }
  ciMethod*         callee()    const    { return _intrinsic->method(); }

  bool try_to_inline(int predicate);
  Node* try_to_predicate(int predicate);

  void push_result() {
    // Push the result onto the stack.
    Node* res = result();
    if (!stopped() && res != nullptr) {
      if (res->is_top()) {
        assert(false, "Can't determine return value.");
        C->record_method_not_compilable("Can't determine return value.");
      }
      BasicType bt = res->bottom_type()->basic_type();
      if (C->inlining_incrementally() && res->is_InlineType()) {
        // The caller expects an oop when incrementally inlining an intrinsic that returns an
        // inline type. Make sure the call is re-executed if the allocation triggers a deoptimization.
        PreserveReexecuteState preexecs(this);
        jvms()->set_should_reexecute(true);
        res = res->as_InlineType()->buffer(this);
      }
      push_node(bt, res);
    }
  }

 private:
  void fatal_unexpected_iid(vmIntrinsics::ID iid) {
    fatal("unexpected intrinsic %d: %s", vmIntrinsics::as_int(iid), vmIntrinsics::name_at(iid));
  }

  void set_result(Node* n) { assert(_result == nullptr, "only set once"); _result = n; }
  void set_result(RegionNode* region, PhiNode* value);
  Node* result() { return _result; }

  virtual int reexecute_sp() { return _reexecute_sp; }

  /* When an intrinsic makes changes before bailing out, it's necessary to restore the graph
   * to its previous state. See JDK-8359344 for an example of what can go wrong. It's also not
   * always possible to bail out before making changes, because the decision to bail out might
   * depend on new nodes (their types, for instance).
   *
   * So, if an intrinsic might run into this situation, it must start by saving the state:
   * constructing a SavedState saves the current state, and the state is restored when the
   * SavedState is destructed. If the intrinsic does not bail out, it needs to call discard()
   * to prevent the old state from being restored.
   */
  class SavedState {
    LibraryCallKit* _kit;
    uint _sp;
    JVMState* _jvms;
    SafePointNode* _map;
    Unique_Node_List _ctrl_succ;
    bool _discarded;

   public:
    SavedState(LibraryCallKit*);
    ~SavedState();
    void discard();
  };
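
  // A minimal usage sketch of SavedState (hypothetical intrinsic; the helper
  // names below are illustrative, not real LibraryCallKit members):
  //
  //   bool LibraryCallKit::inline_foo() {
  //     SavedState saved_state(this);
  //     Node* cast = make_speculative_cast();   // adds nodes to the graph
  //     if (!profitable(cast)) {
  //       // Bail out: ~SavedState rolls the graph and JVM state back.
  //       return false;
  //     }
  //     saved_state.discard();                  // committed: keep the changes
  //     set_result(cast);
  //     return true;
  //   }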

  // Helper functions to inline natives
  Node* generate_guard(Node* test, RegionNode* region, float true_prob);
  Node* generate_slow_guard(Node* test, RegionNode* region);
  Node* generate_fair_guard(Node* test, RegionNode* region);
  Node* generate_negative_guard(Node* index, RegionNode* region,
                                // resulting CastII of index:
                                Node** pos_index = nullptr);
  Node* generate_limit_guard(Node* offset, Node* subseq_length,
                             Node* array_length,
                             RegionNode* region);
  void generate_string_range_check(Node* array, Node* offset,
                                   Node* length, bool char_count,
                                   bool halt_on_oob = false);
  Node* current_thread_helper(Node*& tls_output, ByteSize handle_offset,
                              bool is_immutable);
  Node* generate_current_thread(Node*& tls_output);
  Node* generate_virtual_thread(Node* threadObj);
  Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
                                      RegionNode* region, int null_path,
                                      int offset);
  Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
                               RegionNode* region, int null_path) {
    int offset = java_lang_Class::klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* load_array_klass_from_mirror(Node* mirror, bool never_see_null,
                                     RegionNode* region, int null_path) {
    int offset = java_lang_Class::array_klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* load_default_refined_array_klass(Node* klass_node, bool type_array_guard = true);

  Node* generate_klass_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region,
                                   ByteSize offset, const Type* type, BasicType bt);
  Node* generate_misc_flags_guard(Node* kls,
                                  int modifier_mask, int modifier_bits,
                                  RegionNode* region);
  Node* generate_interface_guard(Node* kls, RegionNode* region);

  enum ArrayKind {
    AnyArray,
    NonArray,
    RefArray,
    NonRefArray,
    TypeArray
  };

  Node* generate_hidden_class_guard(Node* kls, RegionNode* region);

  Node* generate_array_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, AnyArray, obj);
  }
  Node* generate_non_array_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, NonArray, obj);
  }
  Node* generate_refArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, RefArray, obj);
  }
  Node* generate_non_refArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, NonRefArray, obj);
  }
  Node* generate_typeArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, TypeArray, obj);
  }
  Node* generate_array_guard_common(Node* kls, RegionNode* region, ArrayKind kind, Node** obj = nullptr);
  Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
  CallJavaNode* generate_method_call(vmIntrinsicID method_id, bool is_virtual, bool is_static, bool res_not_null);
  CallJavaNode* generate_method_call_static(vmIntrinsicID method_id, bool res_not_null) {
    return generate_method_call(method_id, false, true, res_not_null);
  }
  Node* load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, DecoratorSet decorators = IN_HEAP, bool is_static = false, ciInstanceKlass* fromKls = nullptr);
  Node* field_address_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, bool is_exact = true, bool is_static = false, ciInstanceKlass* fromKls = nullptr);

  Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
  Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
                          RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfChar(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_vectorizedHashCode();
  bool inline_string_toBytesU();
  bool inline_string_getCharsU();
  bool inline_string_copy(bool compress);
  bool inline_string_char_access(bool is_store);
  bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
  bool inline_math_native(vmIntrinsics::ID id);
  bool inline_math(vmIntrinsics::ID id);
  bool inline_double_math(vmIntrinsics::ID id);
  bool inline_math_pow();
  template <typename OverflowOp>
  bool inline_math_overflow(Node* arg1, Node* arg2);
  bool inline_math_mathExact(Node* math, Node* test);
  bool inline_math_addExactI(bool is_increment);
  bool inline_math_addExactL(bool is_increment);
  bool inline_math_multiplyExactI();
  bool inline_math_multiplyExactL();
  bool inline_math_multiplyHigh();
  bool inline_math_unsignedMultiplyHigh();
  bool inline_math_negateExactI();
  bool inline_math_negateExactL();
  bool inline_math_subtractExactI(bool is_decrement);
  bool inline_math_subtractExactL(bool is_decrement);
  bool inline_min_max(vmIntrinsics::ID id);
  bool inline_notify(vmIntrinsics::ID id);
  // This returns Type::AnyPtr, RawPtr, or OopPtr.
  int classify_unsafe_addr(Node*& base, Node*& offset, BasicType type);
  Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);

  typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
  DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
  bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned, bool is_flat = false);
  bool inline_unsafe_flat_access(bool is_store, AccessKind kind);
  static bool klass_needs_init_guard(Node* kls);
  bool inline_unsafe_allocate();
  bool inline_unsafe_newArray(bool uninitialized);
  bool inline_newArray(bool null_free, bool atomic);
  typedef enum { IsFlat, IsNullRestricted, IsAtomic } ArrayPropertiesCheck;
  bool inline_getArrayProperties(ArrayPropertiesCheck check);
  bool inline_unsafe_writeback0();
  bool inline_unsafe_writebackSync0(bool is_pre);
  bool inline_unsafe_copyMemory();
  bool inline_unsafe_make_private_buffer();
  bool inline_unsafe_finish_private_buffer();
  bool inline_unsafe_setMemory();

  bool inline_native_currentCarrierThread();
  bool inline_native_currentThread();
  bool inline_native_setCurrentThread();

  bool inline_native_scopedValueCache();
  const Type* scopedValueCache_type();
  Node* scopedValueCache_helper();
  bool inline_native_setScopedValueCache();
  bool inline_native_Continuation_pinning(bool unpin);

  bool inline_native_time_funcs(address method, const char* funcName);
#if INCLUDE_JVMTI
  bool inline_native_notify_jvmti_funcs(address funcAddr, const char* funcName, bool is_start, bool is_end);
  bool inline_native_notify_jvmti_hide();
  bool inline_native_notify_jvmti_sync();
#endif

#ifdef JFR_HAVE_INTRINSICS
  bool inline_native_classID();
  bool inline_native_getEventWriter();
  bool inline_native_jvm_commit();
  void extend_setCurrentThread(Node* jt, Node* thread);
#endif
  bool inline_native_Class_query(vmIntrinsics::ID id);
  bool inline_primitive_Class_conversion(vmIntrinsics::ID id);
  bool inline_native_subtype_check();
  bool inline_native_getLength();
  bool inline_array_copyOf(bool is_copyOfRange);
  bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_preconditions_checkIndex(BasicType bt);
  void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
  bool inline_native_clone(bool is_virtual);
  bool inline_native_Reflection_getCallerClass();
  // Helper function for inlining native object hash method
  bool inline_native_hashcode(bool is_virtual, bool is_static);
  bool inline_native_getClass();

  // Helper functions for inlining arraycopy
  bool inline_arraycopy();
  AllocateArrayNode* tightly_coupled_allocation(Node* ptr);
  static CallStaticJavaNode* get_uncommon_trap_from_success_proj(Node* node);
  SafePointNode* create_safepoint_with_state_before_array_allocation(const AllocateArrayNode* alloc) const;
  void replace_unrelated_uncommon_traps_with_alloc_state(AllocateArrayNode* alloc, JVMState* saved_jvms_before_guards);
  void replace_unrelated_uncommon_traps_with_alloc_state(JVMState* saved_jvms_before_guards);
  void create_new_uncommon_trap(CallStaticJavaNode* uncommon_trap_call);
  JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
  void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms_before_guards, int saved_reexecute_sp,
                                      uint new_idx);
  bool check_array_sort_arguments(Node* elementType, Node* obj, BasicType& bt);
  bool inline_array_sort();
  bool inline_array_partition();
  typedef enum { LS_get_add, LS_get_set, LS_cmp_swap, LS_cmp_swap_weak, LS_cmp_exchange } LoadStoreKind;
  bool inline_unsafe_load_store(BasicType type, LoadStoreKind kind, AccessKind access_kind);
  bool inline_unsafe_fence(vmIntrinsics::ID id);
  bool inline_onspinwait();
  bool inline_fp_conversions(vmIntrinsics::ID id);
  bool inline_fp_range_check(vmIntrinsics::ID id);
  bool inline_fp16_operations(vmIntrinsics::ID id, int num_args);
  Node* unbox_fp16_value(const TypeInstPtr* box_class, ciField* field, Node* box);
  Node* box_fp16_value(const TypeInstPtr* box_class, ciField* field, Node* value);
  bool inline_number_methods(vmIntrinsics::ID id);
  bool inline_bitshuffle_methods(vmIntrinsics::ID id);
  bool inline_compare_unsigned(vmIntrinsics::ID id);
  bool inline_divmod_methods(vmIntrinsics::ID id);
  bool inline_reference_get0();
  bool inline_reference_refersTo0(bool is_phantom);
  bool inline_reference_clear0(bool is_phantom);
  bool inline_Class_cast();
  bool inline_aescrypt_Block(vmIntrinsics::ID id);
  bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
  bool inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id);
  bool inline_counterMode_AESCrypt(vmIntrinsics::ID id);
  Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
  Node* inline_electronicCodeBook_AESCrypt_predicate(bool decrypting);
  Node* inline_counterMode_AESCrypt_predicate();
  Node* get_key_start_from_aescrypt_object(Node* aescrypt_object);
  bool inline_ghash_processBlocks();
  bool inline_chacha20Block();
  bool inline_kyberNtt();
  bool inline_kyberInverseNtt();
  bool inline_kyberNttMult();
  bool inline_kyberAddPoly_2();
  bool inline_kyberAddPoly_3();
  bool inline_kyber12To16();
  bool inline_kyberBarrettReduce();
  bool inline_dilithiumAlmostNtt();
  bool inline_dilithiumAlmostInverseNtt();
  bool inline_dilithiumNttMult();
  bool inline_dilithiumMontMulByConstant();
  bool inline_dilithiumDecomposePoly();
  bool inline_base64_encodeBlock();
  bool inline_base64_decodeBlock();
  bool inline_poly1305_processBlocks();
  bool inline_intpoly_montgomeryMult_P256();
  bool inline_intpoly_assign();
  bool inline_digestBase_implCompress(vmIntrinsics::ID id);
  bool inline_double_keccak();
  bool inline_digestBase_implCompressMB(int predicate);
  bool inline_digestBase_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass,
                                        BasicType elem_type, address stubAddr, const char* stubName,
                                        Node* src_start, Node* ofs, Node* limit);
  Node* get_state_from_digest_object(Node* digestBase_object, BasicType elem_type);
  Node* get_block_size_from_digest_object(Node* digestBase_object);
  Node* inline_digestBase_implCompressMB_predicate(int predicate);
  bool inline_encodeISOArray(bool ascii);
  bool inline_updateCRC32();
  bool inline_updateBytesCRC32();
  bool inline_updateByteBufferCRC32();
  Node* get_table_from_crc32c_class(ciInstanceKlass* crc32c_class);
  bool inline_updateBytesCRC32C();
  bool inline_updateDirectByteBufferCRC32C();
  bool inline_updateBytesAdler32();
  bool inline_updateByteBufferAdler32();
  bool inline_multiplyToLen();
  bool inline_countPositives();
  bool inline_squareToLen();
  bool inline_mulAdd();
  bool inline_montgomeryMultiply();
  bool inline_montgomerySquare();
  bool inline_bigIntegerShift(bool isRightShift);
  bool inline_vectorizedMismatch();
  bool inline_fma(vmIntrinsics::ID id);
  bool inline_character_compare(vmIntrinsics::ID id);
  bool inline_galoisCounterMode_AESCrypt();
  Node* inline_galoisCounterMode_AESCrypt_predicate();

  bool inline_profileBoolean();
  bool inline_isCompileConstant();

  bool inline_continuation_do_yield();

  // Vector API support
  bool inline_vector_nary_operation(int n);
  bool inline_vector_call(int arity);
  bool inline_vector_frombits_coerced();
  bool inline_vector_mask_operation();
  bool inline_vector_mem_operation(bool is_store);
  bool inline_vector_mem_masked_operation(bool is_store);
  bool inline_vector_gather_scatter(bool is_scatter);
  bool inline_vector_reduction();
  bool inline_vector_test();
  bool inline_vector_blend();
  bool inline_vector_rearrange();
  bool inline_vector_select_from();
  bool inline_vector_compare();
  bool inline_vector_broadcast_int();
  bool inline_vector_convert();
  bool inline_vector_extract();
  bool inline_vector_insert();
  bool inline_vector_compress_expand();
  bool inline_index_vector();
  bool inline_index_partially_in_upper_range();
  bool inline_vector_select_from_two_vectors();

  Node* gen_call_to_vector_math(int vector_api_op_id, BasicType bt, int num_elem, Node* opd1, Node* opd2);

  enum VectorMaskUseType {
    VecMaskUseLoad  = 1 << 0,
    VecMaskUseStore = 1 << 1,
    VecMaskUseAll   = VecMaskUseLoad | VecMaskUseStore,
    VecMaskUsePred  = 1 << 2,
    VecMaskNotUsed  = 1 << 3
  };

  bool arch_supports_vector(int op, int num_elem, BasicType type, VectorMaskUseType mask_use_type, bool has_scalar_args = false);
  bool arch_supports_vector_rotate(int opc, int num_elem, BasicType elem_bt, VectorMaskUseType mask_use_type, bool has_scalar_args = false);

  void clear_upper_avx() {
#ifdef X86
    if (UseAVX >= 2) {
      C->set_clear_upper_avx(true);
    }
#endif
  }

  bool inline_getObjectSize();

  bool inline_blackhole();
};

#endif // SHARE_OPTO_LIBRARY_CALL_HPP