/*
 * Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_LIBRARY_CALL_HPP
#define SHARE_OPTO_LIBRARY_CALL_HPP

#include "ci/ciMethod.hpp"
#include "classfile/javaClasses.hpp"
#include "opto/callGenerator.hpp"
#include "opto/castnode.hpp"
#include "opto/convertnode.hpp"
#include "opto/graphKit.hpp"
#include "opto/inlinetypenode.hpp"
#include "opto/intrinsicnode.hpp"
#include "opto/movenode.hpp"

class LibraryIntrinsic : public InlineCallGenerator {
  // Extend the set of intrinsics known to the runtime:
 public:
 private:
  bool             _is_virtual;
  bool             _does_virtual_dispatch;
  int8_t           _predicates_count;  // Intrinsic is predicated by several conditions
  int8_t           _last_predicate;    // Last generated predicate
  vmIntrinsics::ID _intrinsic_id;

 public:
  LibraryIntrinsic(ciMethod* m, bool is_virtual, int predicates_count, bool does_virtual_dispatch, vmIntrinsics::ID id)
    : InlineCallGenerator(m),
      _is_virtual(is_virtual),
      _does_virtual_dispatch(does_virtual_dispatch),
      _predicates_count((int8_t)predicates_count),
      _last_predicate((int8_t)-1),
      _intrinsic_id(id)
  {
  }
  virtual bool is_intrinsic() const { return true; }
  virtual bool is_virtual()   const { return _is_virtual; }
  virtual bool is_predicated() const { return _predicates_count > 0; }
  virtual int  predicates_count() const { return _predicates_count; }
  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
  virtual JVMState* generate(JVMState* jvms);
  virtual Node* generate_predicate(JVMState* jvms, int predicate);
  vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
};


// Local helper class for LibraryIntrinsic:
class LibraryCallKit : public GraphKit {
 private:
  LibraryIntrinsic* _intrinsic;     // the library intrinsic being called
  Node*             _result;        // the result node, if any
  int               _reexecute_sp;  // the stack pointer when bytecode needs to be reexecuted

  const TypeOopPtr* sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr* adr_type);

 public:
  LibraryCallKit(JVMState* jvms, LibraryIntrinsic* intrinsic)
    : GraphKit(jvms),
      _intrinsic(intrinsic),
      _result(nullptr)
  {
    // Check if this is a root compile. In that case we don't have a caller.
    if (!jvms->has_method()) {
      _reexecute_sp = sp();
    } else {
      // Find out how many arguments the interpreter needs when deoptimizing
      // and save the stack pointer value so it can be used by uncommon_trap.
      // We find the argument count by looking at the declared signature.
      bool ignored_will_link;
      ciSignature* declared_signature = nullptr;
      ciMethod* ignored_callee = caller()->get_method_at_bci(bci(), ignored_will_link, &declared_signature);
      const int nargs = declared_signature->arg_size_for_bc(caller()->java_code_at_bci(bci()));
      _reexecute_sp = sp() + nargs;  // "push" arguments back on stack
    }
  }

  virtual LibraryCallKit* is_LibraryCallKit() const { return (LibraryCallKit*)this; }

  ciMethod*         caller()    const    { return jvms()->method(); }
  int               bci()       const    { return jvms()->bci(); }
  LibraryIntrinsic* intrinsic() const    { return _intrinsic; }
  vmIntrinsics::ID  intrinsic_id() const { return _intrinsic->intrinsic_id(); }
  ciMethod*         callee()    const    { return _intrinsic->method(); }

  bool try_to_inline(int predicate);
  Node* try_to_predicate(int predicate);

  void push_result() {
    // Push the result onto the stack.
    Node* res = result();
    if (!stopped() && res != nullptr) {
      if (res->is_top()) {
        assert(false, "Can't determine return value.");
        C->record_method_not_compilable("Can't determine return value.");
      }
      BasicType bt = res->bottom_type()->basic_type();
      if (C->inlining_incrementally() && res->is_InlineType()) {
        // The caller expects an oop when incrementally inlining an intrinsic that returns an
        // inline type. Make sure the call is re-executed if the allocation triggers a deoptimization.
        PreserveReexecuteState preexecs(this);
        jvms()->set_should_reexecute(true);
        res = res->as_InlineType()->buffer(this);
      }
      push_node(bt, res);
    }
  }

 private:
  void fatal_unexpected_iid(vmIntrinsics::ID iid) {
    fatal("unexpected intrinsic %d: %s", vmIntrinsics::as_int(iid), vmIntrinsics::name_at(iid));
  }

  void set_result(Node* n) { assert(_result == nullptr, "only set once"); _result = n; }
  void set_result(RegionNode* region, PhiNode* value);
  Node* result() { return _result; }

  virtual int reexecute_sp() { return _reexecute_sp; }

  /* When an intrinsic makes changes before bailing out, it's necessary to restore the graph
   * to the state it was in. See JDK-8359344 for what can go wrong. It's also not always
   * possible to bail out before making changes, because the decision to bail out might
   * depend on new nodes (their types, for instance).
   *
   * So, if an intrinsic might run into this situation, it must start by constructing a
   * SavedState, which captures the current state and restores it on destruction. If the
   * intrinsic does not bail out, it needs to call discard() to prevent the old state from
   * being restored.
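   *
   * A minimal sketch of the intended pattern (inline_example() and looks_profitable()
   * are hypothetical, for illustration only):
   *
   *   bool LibraryCallKit::inline_example() {
   *     SavedState saved_state(this);
   *     Node* n = ...;                 // new nodes created before the decision
   *     if (!looks_profitable(n)) {
   *       return false;               // bail out: ~SavedState restores the graph
   *     }
   *     saved_state.discard();        // committed: do not restore the old state
   *     set_result(n);
   *     return true;
   *   }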
   */
  class SavedState {
    LibraryCallKit* _kit;
    uint _sp;
    JVMState* _jvms;
    SafePointNode* _map;
    Unique_Node_List _ctrl_succ;
    bool _discarded;

   public:
    SavedState(LibraryCallKit*);
    ~SavedState();
    void discard();
  };

  // Helper functions to inline natives
  Node* generate_guard(Node* test, RegionNode* region, float true_prob);
  Node* generate_slow_guard(Node* test, RegionNode* region);
  Node* generate_fair_guard(Node* test, RegionNode* region);
  Node* generate_negative_guard(Node* index, RegionNode* region,
                                // resulting CastII of index:
                                Node** pos_index = nullptr);
  Node* generate_limit_guard(Node* offset, Node* subseq_length,
                             Node* array_length,
                             RegionNode* region);
  void generate_string_range_check(Node* array, Node* offset,
                                   Node* length, bool char_count,
                                   bool halt_on_oob = false);
  Node* current_thread_helper(Node*& tls_output, ByteSize handle_offset,
                              bool is_immutable);
  Node* generate_current_thread(Node*& tls_output);
  Node* generate_virtual_thread(Node* threadObj);
  Node* load_mirror_from_klass(Node* klass);
  Node* load_klass_from_mirror_common(Node* mirror, bool never_see_null,
                                      RegionNode* region, int null_path,
                                      int offset);
  Node* load_klass_from_mirror(Node* mirror, bool never_see_null,
                               RegionNode* region, int null_path) {
    int offset = java_lang_Class::klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* load_array_klass_from_mirror(Node* mirror, bool never_see_null,
                                     RegionNode* region, int null_path) {
    int offset = java_lang_Class::array_klass_offset();
    return load_klass_from_mirror_common(mirror, never_see_null,
                                         region, null_path,
                                         offset);
  }
  Node* load_default_refined_array_klass(Node* klass_node, bool type_array_guard = true);
  Node* load_non_refined_array_klass(Node* klass_node);

  Node* generate_klass_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region,
                                   ByteSize offset, const Type* type, BasicType bt);
  Node* generate_misc_flags_guard(Node* kls,
                                  int modifier_mask, int modifier_bits,
                                  RegionNode* region);
  Node* generate_interface_guard(Node* kls, RegionNode* region);

  enum ArrayKind {
    AnyArray,
    NonArray,
    RefArray,
    NonRefArray,
    TypeArray
  };

  Node* generate_hidden_class_guard(Node* kls, RegionNode* region);

  Node* generate_array_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, AnyArray, obj);
  }
  Node* generate_non_array_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, NonArray, obj);
  }
  Node* generate_refArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, RefArray, obj);
  }
  Node* generate_non_refArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, NonRefArray, obj);
  }
  Node* generate_typeArray_guard(Node* kls, RegionNode* region, Node** obj = nullptr) {
    return generate_array_guard_common(kls, region, TypeArray, obj);
  }
  Node* generate_array_guard_common(Node* kls, RegionNode* region, ArrayKind kind, Node** obj = nullptr);
  Node* generate_virtual_guard(Node* obj_klass, RegionNode* slow_region);
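  // Make a slow-path call to the Java method being intrinsified, e.g. as a
  // fallback when a fast-path guard fails.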
  CallJavaNode* generate_method_call(vmIntrinsicID method_id, bool is_virtual, bool is_static, bool res_not_null);
  CallJavaNode* generate_method_call_static(vmIntrinsicID method_id, bool res_not_null) {
    return generate_method_call(method_id, false, true, res_not_null);
  }
  Node* load_field_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, DecoratorSet decorators = IN_HEAP, bool is_static = false, ciInstanceKlass* fromKls = nullptr);
  Node* field_address_from_object(Node* fromObj, const char* fieldName, const char* fieldTypeString, bool is_exact = true, bool is_static = false, ciInstanceKlass* fromKls = nullptr);

  Node* make_string_method_node(int opcode, Node* str1_start, Node* cnt1, Node* str2_start, Node* cnt2, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_compareTo(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOf(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfI(StrIntrinsicNode::ArgEnc ae);
  Node* make_indexOf_node(Node* src_start, Node* src_count, Node* tgt_start, Node* tgt_count,
                          RegionNode* region, Node* phi, StrIntrinsicNode::ArgEnc ae);
  bool inline_string_indexOfChar(StrIntrinsicNode::ArgEnc ae);
  bool inline_string_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_vectorizedHashCode();
  bool inline_string_toBytesU();
  bool inline_string_getCharsU();
  bool inline_string_copy(bool compress);
  bool inline_string_char_access(bool is_store);
  bool runtime_math(const TypeFunc* call_type, address funcAddr, const char* funcName);
  bool inline_math_native(vmIntrinsics::ID id);
  bool inline_math(vmIntrinsics::ID id);
  bool inline_double_math(vmIntrinsics::ID id);
  bool inline_math_pow();
  template <typename OverflowOp>
  bool inline_math_overflow(Node* arg1, Node* arg2);
  bool inline_math_mathExact(Node* math, Node* test);
  bool inline_math_addExactI(bool is_increment);
  bool inline_math_addExactL(bool is_increment);
  bool inline_math_multiplyExactI();
  bool inline_math_multiplyExactL();
  bool inline_math_multiplyHigh();
  bool inline_math_unsignedMultiplyHigh();
  bool inline_math_negateExactI();
  bool inline_math_negateExactL();
  bool inline_math_subtractExactI(bool is_decrement);
  bool inline_math_subtractExactL(bool is_decrement);
  bool inline_min_max(vmIntrinsics::ID id);
  bool inline_notify(vmIntrinsics::ID id);
  // This returns Type::AnyPtr, RawPtr, or OopPtr.
  int classify_unsafe_addr(Node*& base, Node*& offset, BasicType type);
  Node* make_unsafe_address(Node*& base, Node* offset, BasicType type = T_ILLEGAL, bool can_cast = false);

  typedef enum { Relaxed, Opaque, Volatile, Acquire, Release } AccessKind;
  DecoratorSet mo_decorator_for_access_kind(AccessKind kind);
  bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned);
  bool inline_unsafe_flat_access(bool is_store, AccessKind kind);
  static bool klass_needs_init_guard(Node* kls);
  bool inline_unsafe_allocate();
  bool inline_unsafe_newArray(bool uninitialized);
  bool inline_newArray(bool null_free, bool atomic);
  typedef enum { IsFlat, IsNullRestricted, IsAtomic } ArrayPropertiesCheck;
  bool inline_getArrayProperties(ArrayPropertiesCheck check);
  bool inline_unsafe_writeback0();
  bool inline_unsafe_writebackSync0(bool is_pre);
  bool inline_unsafe_copyMemory();
  bool inline_unsafe_make_private_buffer();
  bool inline_unsafe_finish_private_buffer();
  bool inline_unsafe_setMemory();

  bool inline_native_currentCarrierThread();
  bool inline_native_currentThread();
  bool inline_native_setCurrentThread();

  bool inline_native_scopedValueCache();
  const Type* scopedValueCache_type();
  Node* scopedValueCache_helper();
  bool inline_native_setScopedValueCache();
  bool inline_native_Continuation_pinning(bool unpin);

  bool inline_native_time_funcs(address method, const char* funcName);

  bool inline_native_vthread_start_transition(address funcAddr, const char* funcName, bool is_final_transition);
  bool inline_native_vthread_end_transition(address funcAddr, const char* funcName, bool is_first_transition);

#if INCLUDE_JVMTI
  bool inline_native_notify_jvmti_sync();
#endif

#ifdef JFR_HAVE_INTRINSICS
  bool inline_native_classID();
  bool inline_native_getEventWriter();
  bool inline_native_jvm_commit();
  void extend_setCurrentThread(Node* jt, Node* thread);
#endif
  bool inline_native_Class_query(vmIntrinsics::ID id);
  bool inline_primitive_Class_conversion(vmIntrinsics::ID id);
  bool inline_native_subtype_check();
  bool inline_native_getLength();
  bool inline_array_copyOf(bool is_copyOfRange);
  bool inline_array_equals(StrIntrinsicNode::ArgEnc ae);
  bool inline_preconditions_checkIndex(BasicType bt);
  void copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array);
  bool inline_native_clone(bool is_virtual);
  bool inline_native_Reflection_getCallerClass();
  // Helper function for inlining native object hash method
  bool inline_native_hashcode(bool is_virtual, bool is_static);
  bool inline_native_getClass();

  // Helper functions for inlining arraycopy
  bool inline_arraycopy();
  AllocateArrayNode* tightly_coupled_allocation(Node* ptr);
  static CallStaticJavaNode* get_uncommon_trap_from_success_proj(Node* node);
  SafePointNode* create_safepoint_with_state_before_array_allocation(const AllocateArrayNode* alloc) const;
  void replace_unrelated_uncommon_traps_with_alloc_state(AllocateArrayNode* alloc, JVMState* saved_jvms_before_guards);
  void replace_unrelated_uncommon_traps_with_alloc_state(JVMState* saved_jvms_before_guards);
  void create_new_uncommon_trap(CallStaticJavaNode* uncommon_trap_call);
  JVMState* arraycopy_restore_alloc_state(AllocateArrayNode* alloc, int& saved_reexecute_sp);
  void arraycopy_move_allocation_here(AllocateArrayNode* alloc, Node* dest, JVMState* saved_jvms_before_guards,
                                      int saved_reexecute_sp, uint new_idx);
  bool check_array_sort_arguments(Node* elementType, Node* obj, BasicType& bt);
  bool inline_array_sort();
  bool inline_array_partition();
  typedef enum { LS_get_add, LS_get_set, LS_cmp_swap, LS_cmp_swap_weak, LS_cmp_exchange } LoadStoreKind;
  bool inline_unsafe_load_store(BasicType type, LoadStoreKind kind, AccessKind access_kind);
  bool inline_unsafe_fence(vmIntrinsics::ID id);
  bool inline_arrayInstanceBaseOffset();
  bool inline_arrayInstanceIndexScale();
  bool inline_arrayLayout();
  bool inline_getFieldMap();
  bool inline_onspinwait();
  bool inline_fp_conversions(vmIntrinsics::ID id);
  bool inline_fp_range_check(vmIntrinsics::ID id);
  bool inline_fp16_operations(vmIntrinsics::ID id, int num_args);
  Node* unbox_fp16_value(const TypeInstPtr* box_class, ciField* field, Node* box);
  Node* box_fp16_value(const TypeInstPtr* box_class, ciField* field, Node* value);
  bool inline_number_methods(vmIntrinsics::ID id);
  bool inline_bitshuffle_methods(vmIntrinsics::ID id);
  bool inline_compare_unsigned(vmIntrinsics::ID id);
  bool inline_divmod_methods(vmIntrinsics::ID id);
  bool inline_reference_get0();
  bool inline_reference_refersTo0(bool is_phantom);
  bool inline_reference_clear0(bool is_phantom);
  bool inline_Class_cast();
  bool inline_aescrypt_Block(vmIntrinsics::ID id);
  bool inline_cipherBlockChaining_AESCrypt(vmIntrinsics::ID id);
  bool inline_electronicCodeBook_AESCrypt(vmIntrinsics::ID id);
  bool inline_counterMode_AESCrypt(vmIntrinsics::ID id);
  Node* inline_cipherBlockChaining_AESCrypt_predicate(bool decrypting);
  Node* inline_electronicCodeBook_AESCrypt_predicate(bool decrypting);
  Node* inline_counterMode_AESCrypt_predicate();
  Node* get_key_start_from_aescrypt_object(Node* aescrypt_object, bool is_decrypt);
  bool inline_ghash_processBlocks();
  bool inline_chacha20Block();
  bool inline_kyberNtt();
  bool inline_kyberInverseNtt();
  bool inline_kyberNttMult();
  bool inline_kyberAddPoly_2();
  bool inline_kyberAddPoly_3();
  bool inline_kyber12To16();
  bool inline_kyberBarrettReduce();
  bool inline_dilithiumAlmostNtt();
  bool inline_dilithiumAlmostInverseNtt();
  bool inline_dilithiumNttMult();
  bool inline_dilithiumMontMulByConstant();
  bool inline_dilithiumDecomposePoly();
  bool inline_base64_encodeBlock();
  bool inline_base64_decodeBlock();
  bool inline_poly1305_processBlocks();
  bool inline_intpoly_montgomeryMult_P256();
  bool inline_intpoly_assign();
  bool inline_digestBase_implCompress(vmIntrinsics::ID id);
  bool inline_double_keccak();
  bool inline_digestBase_implCompressMB(int predicate);
  bool inline_digestBase_implCompressMB(Node* digestBaseObj, ciInstanceKlass* instklass,
                                        BasicType elem_type, address stubAddr, const char* stubName,
                                        Node* src_start, Node* ofs, Node* limit);
  Node* get_state_from_digest_object(Node* digestBase_object, BasicType elem_type);
  Node* get_block_size_from_digest_object(Node* digestBase_object);
  Node* inline_digestBase_implCompressMB_predicate(int predicate);
  bool inline_encodeISOArray(bool ascii);
  bool inline_updateCRC32();
  bool inline_updateBytesCRC32();
  bool inline_updateByteBufferCRC32();
  Node* get_table_from_crc32c_class(ciInstanceKlass* crc32c_class);
  bool inline_updateBytesCRC32C();
  bool inline_updateDirectByteBufferCRC32C();
  bool inline_updateBytesAdler32();
  bool inline_updateByteBufferAdler32();
  bool inline_multiplyToLen();
  bool inline_countPositives();
  bool inline_squareToLen();
  bool inline_mulAdd();
  bool inline_montgomeryMultiply();
  bool inline_montgomerySquare();
  bool inline_bigIntegerShift(bool isRightShift);
  bool inline_vectorizedMismatch();
  bool inline_fma(vmIntrinsics::ID id);
  bool inline_character_compare(vmIntrinsics::ID id);
  bool inline_galoisCounterMode_AESCrypt();
  Node* inline_galoisCounterMode_AESCrypt_predicate();

  bool inline_profileBoolean();
  bool inline_isCompileConstant();

  bool inline_continuation_do_yield();

  // Vector API support
  bool inline_vector_nary_operation(int n);
  bool inline_vector_call(int arity);
  bool inline_vector_frombits_coerced();
  bool inline_vector_mask_operation();
  bool inline_vector_mem_operation(bool is_store);
  bool inline_vector_mem_masked_operation(bool is_store);
  bool inline_vector_gather_scatter(bool is_scatter);
  bool inline_vector_reduction();
  bool inline_vector_test();
  bool inline_vector_blend();
  bool inline_vector_rearrange();
  bool inline_vector_select_from();
  bool inline_vector_compare();
  bool inline_vector_broadcast_int();
  bool inline_vector_convert();
  bool inline_vector_extract();
  bool inline_vector_insert();
  bool inline_vector_compress_expand();
  bool inline_index_vector();
  bool inline_index_partially_in_upper_range();
  bool inline_vector_select_from_two_vectors();

  Node* gen_call_to_vector_math(int vector_api_op_id, BasicType bt, int num_elem, Node* opd1, Node* opd2);

  enum VectorMaskUseType {
    VecMaskUseLoad  = 1 << 0,
    VecMaskUseStore = 1 << 1,
    VecMaskUseAll   = VecMaskUseLoad | VecMaskUseStore,
    VecMaskUsePred  = 1 << 2,
    VecMaskNotUsed  = 1 << 3
  };

  bool arch_supports_vector(int op, int num_elem, BasicType type, VectorMaskUseType mask_use_type, bool has_scalar_args = false);
  bool arch_supports_vector_rotate(int opc, int num_elem, BasicType elem_bt, VectorMaskUseType mask_use_type, bool has_scalar_args = false);

  // On x86 with AVX2 or higher, request that the generated code clears the upper
  // bits of the AVX registers (vzeroupper) to avoid SSE/AVX transition penalties.
  void clear_upper_avx() {
#ifdef X86
    if (UseAVX >= 2) {
      C->set_clear_upper_avx(true);
    }
#endif
  }

  bool inline_getObjectSize();

  bool inline_blackhole();
};

#endif // SHARE_OPTO_LIBRARY_CALL_HPP