/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2025, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_X86_STUBDECLARATIONS_HPP
#define CPU_X86_STUBDECLARATIONS_HPP

#define STUBGEN_PREUNIVERSE_BLOBS_ARCH_DO(do_stub,                      \
                                          do_arch_blob,                 \
                                          do_arch_entry,                \
                                          do_arch_entry_init)           \
  do_arch_blob(preuniverse, 500)                                        \


#define STUBGEN_INITIAL_BLOBS_ARCH_DO(do_stub,                          \
                                      do_arch_blob,                     \
                                      do_arch_entry,                    \
                                      do_arch_entry_init)               \
  do_arch_blob(initial, 20000 WINDOWS_ONLY(+1000))                      \
  do_stub(initial, verify_mxcsr)                                        \
  do_arch_entry(x86, initial, verify_mxcsr, verify_mxcsr_entry,         \
                verify_mxcsr_entry)                                     \
  do_stub(initial, get_previous_sp)                                     \
  do_arch_entry(x86, initial, get_previous_sp,                          \
                get_previous_sp_entry,                                  \
                get_previous_sp_entry)                                  \
  do_stub(initial, f2i_fixup)                                           \
  do_arch_entry(x86, initial, f2i_fixup, f2i_fixup, f2i_fixup)          \
  do_stub(initial, f2l_fixup)                                           \
  do_arch_entry(x86, initial, f2l_fixup, f2l_fixup, f2l_fixup)          \
  do_stub(initial, d2i_fixup)                                           \
  do_arch_entry(x86, initial, d2i_fixup, d2i_fixup, d2i_fixup)          \
  do_stub(initial, d2l_fixup)                                           \
  do_arch_entry(x86, initial, d2l_fixup, d2l_fixup, d2l_fixup)          \
  do_stub(initial, float_sign_mask)                                     \
  do_arch_entry(x86, initial, float_sign_mask, float_sign_mask,         \
                float_sign_mask)                                        \
  do_stub(initial, float_sign_flip)                                     \
  do_arch_entry(x86, initial, float_sign_flip, float_sign_flip,         \
                float_sign_flip)                                        \
  do_stub(initial, double_sign_mask)                                    \
  do_arch_entry(x86, initial, double_sign_mask, double_sign_mask,       \
                double_sign_mask)                                       \
  do_stub(initial, double_sign_flip)                                    \
  do_arch_entry(x86, initial, double_sign_flip, double_sign_flip,       \
                double_sign_flip)                                       \

#define STUBGEN_CONTINUATION_BLOBS_ARCH_DO(do_stub,                     \
                                           do_arch_blob,                \
                                           do_arch_entry,               \
                                           do_arch_entry_init)          \
  do_arch_blob(continuation, 3000)                                      \

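/*
 * The STUBGEN_*_BLOBS_ARCH_DO macros in this file are invoked by the
 * shared stub generator (see src/hotspot/share/runtime/
 * stubDeclarations.hpp for the full contract) with four client
 * callbacks: do_arch_blob(blob_name, size) supplies the arch-specific
 * code buffer size for the blob; do_stub(blob_name, stub_name)
 * declares an arch-specific stub; do_arch_entry(arch, blob_name,
 * stub_name, field_name, getter_name) declares the field recording a
 * stub entry point plus its getter; do_arch_entry_init is the variant
 * for entries that need explicit initialization and is unused in this
 * file. Most of the compiler entries below reuse a single name for
 * stub, field and getter.
 */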
#define STUBGEN_COMPILER_BLOBS_ARCH_DO(do_stub,                         \
                                       do_arch_blob,                    \
                                       do_arch_entry,                   \
                                       do_arch_entry_init)              \
  do_arch_blob(compiler, 109000 WINDOWS_ONLY(+2000))                    \
  do_stub(compiler, vector_float_sign_mask)                             \
  do_arch_entry(x86, compiler, vector_float_sign_mask,                  \
                vector_float_sign_mask, vector_float_sign_mask)         \
  do_stub(compiler, vector_float_sign_flip)                             \
  do_arch_entry(x86, compiler, vector_float_sign_flip,                  \
                vector_float_sign_flip, vector_float_sign_flip)         \
  do_stub(compiler, vector_double_sign_mask)                            \
  do_arch_entry(x86, compiler, vector_double_sign_mask,                 \
                vector_double_sign_mask, vector_double_sign_mask)       \
  do_stub(compiler, vector_double_sign_flip)                            \
  do_arch_entry(x86, compiler, vector_double_sign_flip,                 \
                vector_double_sign_flip, vector_double_sign_flip)       \
  do_stub(compiler, vector_all_bits_set)                                \
  do_arch_entry(x86, compiler, vector_all_bits_set,                     \
                vector_all_bits_set, vector_all_bits_set)               \
  do_stub(compiler, vector_int_mask_cmp_bits)                           \
  do_arch_entry(x86, compiler, vector_int_mask_cmp_bits,                \
                vector_int_mask_cmp_bits, vector_int_mask_cmp_bits)     \
  do_stub(compiler, vector_short_to_byte_mask)                          \
  do_arch_entry(x86, compiler, vector_short_to_byte_mask,               \
                vector_short_to_byte_mask, vector_short_to_byte_mask)   \
  do_stub(compiler, vector_byte_perm_mask)                              \
  do_arch_entry(x86, compiler, vector_byte_perm_mask,                   \
                vector_byte_perm_mask, vector_byte_perm_mask)           \
  do_stub(compiler, vector_int_to_byte_mask)                            \
  do_arch_entry(x86, compiler, vector_int_to_byte_mask,                 \
                vector_int_to_byte_mask, vector_int_to_byte_mask)       \
  do_stub(compiler, vector_int_to_short_mask)                           \
  do_arch_entry(x86, compiler, vector_int_to_short_mask,                \
                vector_int_to_short_mask, vector_int_to_short_mask)     \
  do_stub(compiler, vector_32_bit_mask)                                 \
  do_arch_entry(x86, compiler, vector_32_bit_mask,                      \
                vector_32_bit_mask, vector_32_bit_mask)                 \
  do_stub(compiler, vector_64_bit_mask)                                 \
  do_arch_entry(x86, compiler, vector_64_bit_mask,                      \
                vector_64_bit_mask, vector_64_bit_mask)                 \
  do_stub(compiler, vector_byte_shuffle_mask)                           \
  do_arch_entry(x86, compiler, vector_byte_shuffle_mask,                \
                vector_byte_shuffle_mask, vector_byte_shuffle_mask)     \
  do_stub(compiler, vector_short_shuffle_mask)                          \
  do_arch_entry(x86, compiler, vector_short_shuffle_mask,               \
                vector_short_shuffle_mask, vector_short_shuffle_mask)   \
  do_stub(compiler, vector_int_shuffle_mask)                            \
  do_arch_entry(x86, compiler, vector_int_shuffle_mask,                 \
                vector_int_shuffle_mask, vector_int_shuffle_mask)       \
  do_stub(compiler, vector_long_shuffle_mask)                           \
  do_arch_entry(x86, compiler, vector_long_shuffle_mask,                \
                vector_long_shuffle_mask, vector_long_shuffle_mask)     \
  do_stub(compiler, vector_long_sign_mask)                              \
  do_arch_entry(x86, compiler, vector_long_sign_mask,                   \
                vector_long_sign_mask, vector_long_sign_mask)           \
  do_stub(compiler, vector_iota_indices)                                \
  do_arch_entry(x86, compiler, vector_iota_indices,                     \
                vector_iota_indices, vector_iota_indices)               \
  do_stub(compiler, vector_count_leading_zeros_lut)                     \
  do_arch_entry(x86, compiler, vector_count_leading_zeros_lut,          \
                vector_count_leading_zeros_lut,                         \
                vector_count_leading_zeros_lut)                         \
  do_stub(compiler, vector_reverse_bit_lut)                             \
  do_arch_entry(x86, compiler, vector_reverse_bit_lut,                  \
                vector_reverse_bit_lut, vector_reverse_bit_lut)         \
  do_stub(compiler, vector_reverse_byte_perm_mask_short)                \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_short,     \
                vector_reverse_byte_perm_mask_short,                    \
                vector_reverse_byte_perm_mask_short)                    \
  do_stub(compiler, vector_reverse_byte_perm_mask_int)                  \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_int,       \
                vector_reverse_byte_perm_mask_int,                      \
                vector_reverse_byte_perm_mask_int)                      \
  do_stub(compiler, vector_reverse_byte_perm_mask_long)                 \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_long,      \
                vector_reverse_byte_perm_mask_long,                     \
                vector_reverse_byte_perm_mask_long)                     \
  do_stub(compiler, vector_popcount_lut)                                \
  do_arch_entry(x86, compiler, vector_popcount_lut,                     \
                vector_popcount_lut, vector_popcount_lut)               \
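  /* The remaining compiler stubs are constant tables. Their entry   */ \
  /* fields and getters keep the pre-existing StubRoutines::x86      */ \
  /* names, so the last two macro arguments often differ from the    */ \
  /* stub name (note the _addr suffixes below).                      */ \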
  do_stub(compiler, upper_word_mask)                                    \
  do_arch_entry(x86, compiler, upper_word_mask, upper_word_mask_addr,   \
                upper_word_mask_addr)                                   \
  do_stub(compiler, shuffle_byte_flip_mask)                             \
  do_arch_entry(x86, compiler, shuffle_byte_flip_mask,                  \
                shuffle_byte_flip_mask_addr,                            \
                shuffle_byte_flip_mask_addr)                            \
  do_stub(compiler, pshuffle_byte_flip_mask)                            \
  do_arch_entry(x86, compiler, pshuffle_byte_flip_mask,                 \
                pshuffle_byte_flip_mask_addr,                           \
                pshuffle_byte_flip_mask_addr)                           \
  /* x86_64 exposes these 3 stubs via a generic entry array */          \
  /* other arches use arch-specific entries */                          \
  /* this really needs rationalising */                                 \
  do_stub(compiler, string_indexof_linear_ll)                           \
  do_stub(compiler, string_indexof_linear_uu)                           \
  do_stub(compiler, string_indexof_linear_ul)                           \
  do_stub(compiler, pshuffle_byte_flip_mask_sha512)                     \
  do_arch_entry(x86, compiler, pshuffle_byte_flip_mask_sha512,          \
                pshuffle_byte_flip_mask_addr_sha512,                    \
                pshuffle_byte_flip_mask_addr_sha512)                    \
  do_stub(compiler, compress_perm_table32)                              \
  do_arch_entry(x86, compiler, compress_perm_table32,                   \
                compress_perm_table32, compress_perm_table32)           \
  do_stub(compiler, compress_perm_table64)                              \
  do_arch_entry(x86, compiler, compress_perm_table64,                   \
                compress_perm_table64, compress_perm_table64)           \
  do_stub(compiler, expand_perm_table32)                                \
  do_arch_entry(x86, compiler, expand_perm_table32,                     \
                expand_perm_table32, expand_perm_table32)               \
  do_stub(compiler, expand_perm_table64)                                \
  do_arch_entry(x86, compiler, expand_perm_table64,                     \
                expand_perm_table64, expand_perm_table64)               \
  do_stub(compiler, avx2_shuffle_base64)                                \
  do_arch_entry(x86, compiler, avx2_shuffle_base64,                     \
                avx2_shuffle_base64, base64_avx2_shuffle_addr)          \
  do_stub(compiler, avx2_input_mask_base64)                             \
  do_arch_entry(x86, compiler, avx2_input_mask_base64,                  \
                avx2_input_mask_base64,                                 \
                base64_avx2_input_mask_addr)                            \
  do_stub(compiler, avx2_lut_base64)                                    \
  do_arch_entry(x86, compiler, avx2_lut_base64,                         \
                avx2_lut_base64, base64_avx2_lut_addr)                  \
  do_stub(compiler, avx2_decode_tables_base64)                          \
  do_arch_entry(x86, compiler, avx2_decode_tables_base64,               \
                avx2_decode_tables_base64,                              \
                base64_AVX2_decode_tables_addr)                         \
  do_stub(compiler, avx2_decode_lut_tables_base64)                      \
  do_arch_entry(x86, compiler, avx2_decode_lut_tables_base64,           \
                avx2_decode_lut_tables_base64,                          \
                base64_AVX2_decode_LUT_tables_addr)                     \
  do_stub(compiler, shuffle_base64)                                     \
  do_arch_entry(x86, compiler, shuffle_base64, shuffle_base64,          \
                base64_shuffle_addr)                                    \
  do_stub(compiler, lookup_lo_base64)                                   \
  do_arch_entry(x86, compiler, lookup_lo_base64, lookup_lo_base64,      \
                base64_vbmi_lookup_lo_addr)                             \
  do_stub(compiler, lookup_hi_base64)                                   \
  do_arch_entry(x86, compiler, lookup_hi_base64, lookup_hi_base64,      \
                base64_vbmi_lookup_hi_addr)                             \
  do_stub(compiler, lookup_lo_base64url)                                \
  do_arch_entry(x86, compiler, lookup_lo_base64url,                     \
                lookup_lo_base64url,                                    \
                base64_vbmi_lookup_lo_url_addr)                         \
  do_stub(compiler, lookup_hi_base64url)                                \
  do_arch_entry(x86, compiler, lookup_hi_base64url,                     \
                lookup_hi_base64url,                                    \
                base64_vbmi_lookup_hi_url_addr)                         \
  do_stub(compiler, pack_vec_base64)                                    \
  do_arch_entry(x86, compiler, pack_vec_base64, pack_vec_base64,        \
                base64_vbmi_pack_vec_addr)                              \
  do_stub(compiler, join_0_1_base64)                                    \
  do_arch_entry(x86, compiler, join_0_1_base64, join_0_1_base64,        \
                base64_vbmi_join_0_1_addr)                              \
  do_stub(compiler, join_1_2_base64)                                    \
  do_arch_entry(x86, compiler, join_1_2_base64, join_1_2_base64,        \
                base64_vbmi_join_1_2_addr)                              \
  do_stub(compiler, join_2_3_base64)                                    \
  do_arch_entry(x86, compiler, join_2_3_base64, join_2_3_base64,        \
                base64_vbmi_join_2_3_addr)                              \
  do_stub(compiler, encoding_table_base64)                              \
  do_arch_entry(x86, compiler, encoding_table_base64,                   \
                encoding_table_base64, base64_encoding_table_addr)      \
  do_stub(compiler, decoding_table_base64)                              \
  do_arch_entry(x86, compiler, decoding_table_base64,                   \
                decoding_table_base64, base64_decoding_table_addr)      \


#define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub,                            \
                                    do_arch_blob,                       \
                                    do_arch_entry,                      \
                                    do_arch_entry_init)                 \
  do_arch_blob(final, 31000                                             \
               WINDOWS_ONLY(+22000) ZGC_ONLY(+20000))                   \

#endif // CPU_X86_STUBDECLARATIONS_HPP