/*
 * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2025, Red Hat, Inc. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_X86_STUBDECLARATIONS_HPP
#define CPU_X86_STUBDECLARATIONS_HPP
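
/*
 * The STUBGEN_*_BLOBS_ARCH_DO macros below are X-macro tables: a client
 * binds the four callback parameters (do_stub, do_arch_blob,
 * do_arch_entry, do_arch_entry_init) and invokes a table to emit one
 * declaration or definition per row. As a minimal, hypothetical sketch
 * (the real client macros live in the shared stub-generator code; the
 * callback names used here are illustrative only), a client could
 * declare one address field per arch entry like this:
 *
 *   #define IGNORE_STUB(blob_name, stub_name)
 *   #define IGNORE_BLOB(blob_name, size)
 *   #define DECLARE_FIELD(arch, blob_name, stub_name, field_name,  \
 *                         getter_name)                             \
 *     static address _##field_name;
 *   #define IGNORE_INIT(arch, blob_name, stub_name, field_name, init)
 *
 *   STUBGEN_INITIAL_BLOBS_ARCH_DO(IGNORE_STUB, IGNORE_BLOB,
 *                                 DECLARE_FIELD, IGNORE_INIT)
 *
 * which would expand to "static address _verify_mxcsr_entry;" and so
 * on, one field per do_arch_entry row. Note that none of the x86 tables
 * currently emits a do_arch_entry_init row; the parameter is accepted
 * so that all tables share the same shape.
 */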

#define STUBGEN_INITIAL_BLOBS_ARCH_DO(do_stub,                           \
                                      do_arch_blob,                      \
                                      do_arch_entry,                     \
                                      do_arch_entry_init)                \
  do_arch_blob(initial, 20000 WINDOWS_ONLY(+1000))                       \
  do_stub(initial, verify_mxcsr)                                         \
  do_arch_entry(x86, initial, verify_mxcsr, verify_mxcsr_entry,          \
                verify_mxcsr_entry)                                      \
  do_stub(initial, get_previous_sp)                                      \
  do_arch_entry(x86, initial, get_previous_sp,                           \
                get_previous_sp_entry,                                   \
                get_previous_sp_entry)                                   \
  do_stub(initial, f2i_fixup)                                            \
  do_arch_entry(x86, initial, f2i_fixup, f2i_fixup, f2i_fixup)           \
  do_stub(initial, f2l_fixup)                                            \
  do_arch_entry(x86, initial, f2l_fixup, f2l_fixup, f2l_fixup)           \
  do_stub(initial, d2i_fixup)                                            \
  do_arch_entry(x86, initial, d2i_fixup, d2i_fixup, d2i_fixup)           \
  do_stub(initial, d2l_fixup)                                            \
  do_arch_entry(x86, initial, d2l_fixup, d2l_fixup, d2l_fixup)           \
  do_stub(initial, float_sign_mask)                                      \
  do_arch_entry(x86, initial, float_sign_mask, float_sign_mask,          \
                float_sign_mask)                                         \
  do_stub(initial, float_sign_flip)                                      \
  do_arch_entry(x86, initial, float_sign_flip, float_sign_flip,          \
                float_sign_flip)                                         \
  do_stub(initial, double_sign_mask)                                     \
  do_arch_entry(x86, initial, double_sign_mask, double_sign_mask,        \
                double_sign_mask)                                        \
  do_stub(initial, double_sign_flip)                                     \
  do_arch_entry(x86, initial, double_sign_flip, double_sign_flip,        \
                double_sign_flip)                                        \

#define STUBGEN_CONTINUATION_BLOBS_ARCH_DO(do_stub,                      \
                                           do_arch_blob,                 \
                                           do_arch_entry,                \
                                           do_arch_entry_init)           \
  do_arch_blob(continuation, 3000)                                       \


#define STUBGEN_COMPILER_BLOBS_ARCH_DO(do_stub,                          \
                                       do_arch_blob,                     \
                                       do_arch_entry,                    \
                                       do_arch_entry_init)               \
  do_arch_blob(compiler, 109000 WINDOWS_ONLY(+2000))                     \
  do_stub(compiler, vector_float_sign_mask)                              \
  do_arch_entry(x86, compiler, vector_float_sign_mask,                   \
                vector_float_sign_mask, vector_float_sign_mask)          \
  do_stub(compiler, vector_float_sign_flip)                              \
  do_arch_entry(x86, compiler, vector_float_sign_flip,                   \
                vector_float_sign_flip, vector_float_sign_flip)          \
  do_stub(compiler, vector_double_sign_mask)                             \
  do_arch_entry(x86, compiler, vector_double_sign_mask,                  \
                vector_double_sign_mask, vector_double_sign_mask)        \
  do_stub(compiler, vector_double_sign_flip)                             \
  do_arch_entry(x86, compiler, vector_double_sign_flip,                  \
                vector_double_sign_flip, vector_double_sign_flip)        \
  do_stub(compiler, vector_all_bits_set)                                 \
  do_arch_entry(x86, compiler, vector_all_bits_set,                      \
                vector_all_bits_set, vector_all_bits_set)                \
  do_stub(compiler, vector_int_mask_cmp_bits)                            \
  do_arch_entry(x86, compiler, vector_int_mask_cmp_bits,                 \
                vector_int_mask_cmp_bits, vector_int_mask_cmp_bits)      \
  do_stub(compiler, vector_short_to_byte_mask)                           \
  do_arch_entry(x86, compiler, vector_short_to_byte_mask,                \
                vector_short_to_byte_mask, vector_short_to_byte_mask)    \
  do_stub(compiler, vector_byte_perm_mask)                               \
  do_arch_entry(x86, compiler, vector_byte_perm_mask,                    \
                vector_byte_perm_mask, vector_byte_perm_mask)            \
  do_stub(compiler, vector_int_to_byte_mask)                             \
  do_arch_entry(x86, compiler, vector_int_to_byte_mask,                  \
                vector_int_to_byte_mask, vector_int_to_byte_mask)        \
  do_stub(compiler, vector_int_to_short_mask)                            \
  do_arch_entry(x86, compiler, vector_int_to_short_mask,                 \
                vector_int_to_short_mask, vector_int_to_short_mask)      \
  do_stub(compiler, vector_32_bit_mask)                                  \
  do_arch_entry(x86, compiler, vector_32_bit_mask,                       \
                vector_32_bit_mask, vector_32_bit_mask)                  \
  do_stub(compiler, vector_64_bit_mask)                                  \
  do_arch_entry(x86, compiler, vector_64_bit_mask,                       \
                vector_64_bit_mask, vector_64_bit_mask)                  \
  do_stub(compiler, vector_byte_shuffle_mask)                            \
  do_arch_entry(x86, compiler, vector_byte_shuffle_mask,                 \
                vector_byte_shuffle_mask, vector_byte_shuffle_mask)      \
  do_stub(compiler, vector_short_shuffle_mask)                           \
  do_arch_entry(x86, compiler, vector_short_shuffle_mask,                \
                vector_short_shuffle_mask, vector_short_shuffle_mask)    \
  do_stub(compiler, vector_int_shuffle_mask)                             \
  do_arch_entry(x86, compiler, vector_int_shuffle_mask,                  \
                vector_int_shuffle_mask, vector_int_shuffle_mask)        \
  do_stub(compiler, vector_long_shuffle_mask)                            \
  do_arch_entry(x86, compiler, vector_long_shuffle_mask,                 \
                vector_long_shuffle_mask, vector_long_shuffle_mask)      \
  do_stub(compiler, vector_long_sign_mask)                               \
  do_arch_entry(x86, compiler, vector_long_sign_mask,                    \
                vector_long_sign_mask, vector_long_sign_mask)            \
  do_stub(compiler, vector_iota_indices)                                 \
  do_arch_entry(x86, compiler, vector_iota_indices,                      \
                vector_iota_indices, vector_iota_indices)                \
  do_stub(compiler, vector_count_leading_zeros_lut)                      \
  do_arch_entry(x86, compiler, vector_count_leading_zeros_lut,           \
                vector_count_leading_zeros_lut,                          \
                vector_count_leading_zeros_lut)                          \
  do_stub(compiler, vector_reverse_bit_lut)                              \
  do_arch_entry(x86, compiler, vector_reverse_bit_lut,                   \
                vector_reverse_bit_lut, vector_reverse_bit_lut)          \
  do_stub(compiler, vector_reverse_byte_perm_mask_short)                 \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_short,      \
                vector_reverse_byte_perm_mask_short,                     \
                vector_reverse_byte_perm_mask_short)                     \
  do_stub(compiler, vector_reverse_byte_perm_mask_int)                   \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_int,        \
                vector_reverse_byte_perm_mask_int,                       \
                vector_reverse_byte_perm_mask_int)                       \
  do_stub(compiler, vector_reverse_byte_perm_mask_long)                  \
  do_arch_entry(x86, compiler, vector_reverse_byte_perm_mask_long,       \
                vector_reverse_byte_perm_mask_long,                      \
                vector_reverse_byte_perm_mask_long)                      \
  do_stub(compiler, vector_popcount_lut)                                 \
  do_arch_entry(x86, compiler, vector_popcount_lut,                      \
                vector_popcount_lut, vector_popcount_lut)                \
  do_stub(compiler, upper_word_mask)                                     \
  do_arch_entry(x86, compiler, upper_word_mask, upper_word_mask_addr,    \
                upper_word_mask_addr)                                    \
  do_stub(compiler, shuffle_byte_flip_mask)                              \
  do_arch_entry(x86, compiler, shuffle_byte_flip_mask,                   \
                shuffle_byte_flip_mask_addr,                             \
                shuffle_byte_flip_mask_addr)                             \
  do_stub(compiler, pshuffle_byte_flip_mask)                             \
  do_arch_entry(x86, compiler, pshuffle_byte_flip_mask,                  \
                pshuffle_byte_flip_mask_addr,                            \
                pshuffle_byte_flip_mask_addr)                            \
  /* x86_64 exposes these 3 stubs via a generic entry array */           \
  /* other arches use arch-specific entries */                           \
  /* this really needs rationalising */                                  \
  do_stub(compiler, string_indexof_linear_ll)                            \
  do_stub(compiler, string_indexof_linear_uu)                            \
  do_stub(compiler, string_indexof_linear_ul)                            \
  do_stub(compiler, pshuffle_byte_flip_mask_sha512)                      \
  do_arch_entry(x86, compiler, pshuffle_byte_flip_mask_sha512,           \
                pshuffle_byte_flip_mask_addr_sha512,                     \
                pshuffle_byte_flip_mask_addr_sha512)                     \
  do_stub(compiler, compress_perm_table32)                               \
  do_arch_entry(x86, compiler, compress_perm_table32,                    \
                compress_perm_table32, compress_perm_table32)            \
  do_stub(compiler, compress_perm_table64)                               \
  do_arch_entry(x86, compiler, compress_perm_table64,                    \
                compress_perm_table64, compress_perm_table64)            \
  do_stub(compiler, expand_perm_table32)                                 \
  do_arch_entry(x86, compiler, expand_perm_table32,                      \
                expand_perm_table32, expand_perm_table32)                \
  do_stub(compiler, expand_perm_table64)                                 \
  do_arch_entry(x86, compiler, expand_perm_table64,                      \
                expand_perm_table64, expand_perm_table64)                \
  do_stub(compiler, avx2_shuffle_base64)                                 \
  do_arch_entry(x86, compiler, avx2_shuffle_base64,                      \
                avx2_shuffle_base64, base64_avx2_shuffle_addr)           \
  do_stub(compiler, avx2_input_mask_base64)                              \
  do_arch_entry(x86, compiler, avx2_input_mask_base64,                   \
                avx2_input_mask_base64,                                  \
                base64_avx2_input_mask_addr)                             \
  do_stub(compiler, avx2_lut_base64)                                     \
  do_arch_entry(x86, compiler, avx2_lut_base64,                          \
                avx2_lut_base64, base64_avx2_lut_addr)                   \
  do_stub(compiler, avx2_decode_tables_base64)                           \
  do_arch_entry(x86, compiler, avx2_decode_tables_base64,                \
                avx2_decode_tables_base64,                               \
                base64_AVX2_decode_tables_addr)                          \
  do_stub(compiler, avx2_decode_lut_tables_base64)                       \
  do_arch_entry(x86, compiler, avx2_decode_lut_tables_base64,            \
                avx2_decode_lut_tables_base64,                           \
                base64_AVX2_decode_LUT_tables_addr)                      \
  do_stub(compiler, shuffle_base64)                                      \
  do_arch_entry(x86, compiler, shuffle_base64, shuffle_base64,           \
                base64_shuffle_addr)                                     \
  do_stub(compiler, lookup_lo_base64)                                    \
  do_arch_entry(x86, compiler, lookup_lo_base64, lookup_lo_base64,       \
                base64_vbmi_lookup_lo_addr)                              \
  do_stub(compiler, lookup_hi_base64)                                    \
  do_arch_entry(x86, compiler, lookup_hi_base64, lookup_hi_base64,       \
                base64_vbmi_lookup_hi_addr)                              \
  do_stub(compiler, lookup_lo_base64url)                                 \
  do_arch_entry(x86, compiler, lookup_lo_base64url,                      \
                lookup_lo_base64url,                                     \
                base64_vbmi_lookup_lo_url_addr)                          \
  do_stub(compiler, lookup_hi_base64url)                                 \
  do_arch_entry(x86, compiler, lookup_hi_base64url,                      \
                lookup_hi_base64url,                                     \
                base64_vbmi_lookup_hi_url_addr)                          \
  do_stub(compiler, pack_vec_base64)                                     \
  do_arch_entry(x86, compiler, pack_vec_base64, pack_vec_base64,         \
                base64_vbmi_pack_vec_addr)                               \
  do_stub(compiler, join_0_1_base64)                                     \
  do_arch_entry(x86, compiler, join_0_1_base64, join_0_1_base64,         \
                base64_vbmi_join_0_1_addr)                               \
  do_stub(compiler, join_1_2_base64)                                     \
  do_arch_entry(x86, compiler, join_1_2_base64, join_1_2_base64,         \
                base64_vbmi_join_1_2_addr)                               \
  do_stub(compiler, join_2_3_base64)                                     \
  do_arch_entry(x86, compiler, join_2_3_base64, join_2_3_base64,         \
                base64_vbmi_join_2_3_addr)                               \
  do_stub(compiler, encoding_table_base64)                               \
  do_arch_entry(x86, compiler, encoding_table_base64,                    \
                encoding_table_base64, base64_encoding_table_addr)       \
  do_stub(compiler, decoding_table_base64)                               \
  do_arch_entry(x86, compiler, decoding_table_base64,                    \
                decoding_table_base64, base64_decoding_table_addr)       \


#define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub,                             \
                                    do_arch_blob,                        \
                                    do_arch_entry,                       \
                                    do_arch_entry_init)                  \
  do_arch_blob(final, 31000                                              \
               WINDOWS_ONLY(+22000) ZGC_ONLY(+20000))                    \

#endif // CPU_X86_STUBDECLARATIONS_HPP