1 /*
2 * Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2020, Red Hat, Inc. All rights reserved.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 */
24
25 #include "asm/macroAssembler.inline.hpp"
26 #include "code/vmreg.inline.hpp"
27 #include "runtime/jniHandles.hpp"
28 #include "runtime/jniHandles.inline.hpp"
29 #include "oops/typeArrayOop.inline.hpp"
30 #include "oops/oopCast.inline.hpp"
31 #include "prims/foreignGlobals.hpp"
32 #include "prims/foreignGlobals.inline.hpp"
33 #include "prims/vmstorage.hpp"
34 #include "utilities/formatBuffer.hpp"
35
36 #define __ masm->
37
// Returns true if the integer register 'reg' is listed as caller-saved
// (volatile) by this ABI descriptor (parsed from the Java-side
// volatileStorage array in parse_abi_descriptor).
bool ABIDescriptor::is_volatile_reg(Register reg) const {
  return _integer_volatile_registers.contains(reg);
}
41
// Returns true if the float register 'reg' is caller-saved (volatile).
// The descriptor stores the volatile float set in two pieces: the float
// argument registers plus the *additional* volatile float registers
// (see parse_abi_descriptor), so both lists are checked here.
bool ABIDescriptor::is_volatile_reg(FloatRegister reg) const {
  return _float_argument_registers.contains(reg)
         || _float_additional_volatile_registers.contains(reg);
}
46
// The FFM foreign linker is implemented on this platform (s390).
bool ForeignGlobals::is_foreign_linker_supported() {
  return true;
}
50
// Builds a native ABIDescriptor from the Java-side
// jdk.internal.foreign.abi.ABIDescriptor object referenced by 'jabi':
// argument/return/volatile register arrays, stack alignment, shadow space,
// and the two scratch registers used by generated stubs.
const ABIDescriptor ForeignGlobals::parse_abi_descriptor(jobject jabi) {
  oop abi_oop = JNIHandles::resolve_non_null(jabi);
  ABIDescriptor abi;

  // Registers used to pass arguments, split by storage type.
  objArrayOop inputStorage = jdk_internal_foreign_abi_ABIDescriptor::inputStorage(abi_oop);
  parse_register_array(inputStorage, StorageType::INTEGER, abi._integer_argument_registers, as_Register);
  parse_register_array(inputStorage, StorageType::FLOAT, abi._float_argument_registers, as_FloatRegister);

  // Registers used to return values.
  objArrayOop outputStorage = jdk_internal_foreign_abi_ABIDescriptor::outputStorage(abi_oop);
  parse_register_array(outputStorage, StorageType::INTEGER, abi._integer_return_registers, as_Register);
  parse_register_array(outputStorage, StorageType::FLOAT, abi._float_return_registers, as_FloatRegister);

  // Caller-saved registers. Note: the float list is stored as the
  // *additional* volatile registers, i.e. those not already covered by
  // the float argument registers (see is_volatile_reg(FloatRegister)).
  objArrayOop volatileStorage = jdk_internal_foreign_abi_ABIDescriptor::volatileStorage(abi_oop);
  parse_register_array(volatileStorage, StorageType::INTEGER, abi._integer_volatile_registers, as_Register);
  parse_register_array(volatileStorage, StorageType::FLOAT, abi._float_additional_volatile_registers, as_FloatRegister);

  abi._stack_alignment_bytes = jdk_internal_foreign_abi_ABIDescriptor::stackAlignment(abi_oop);
  abi._shadow_space_bytes = jdk_internal_foreign_abi_ABIDescriptor::shadowSpace(abi_oop);

  abi._scratch1 = parse_vmstorage(jdk_internal_foreign_abi_ABIDescriptor::scratch1(abi_oop));
  abi._scratch2 = parse_vmstorage(jdk_internal_foreign_abi_ABIDescriptor::scratch2(abi_oop));

  return abi;
}
75
76 int RegSpiller::pd_reg_size(VMStorage reg) {
77 if (reg.type() == StorageType::INTEGER || reg.type() == StorageType::FLOAT) {
78 return 8;
79 }
80 return 0; // stack and BAD
81 }
82
// Spills 'reg' into the stack slot at Z_SP + 'offset'. The trailing 'true'
// selects the 64-bit store variant, consistent with the 8-byte slots
// reported by pd_reg_size. Stack and BAD storage kinds are silently
// ignored (nothing to spill).
void RegSpiller::pd_store_reg(MacroAssembler* masm, int offset, VMStorage reg) {
  if (reg.type() == StorageType::INTEGER) {
    __ reg2mem_opt(as_Register(reg), Address(Z_SP, offset), true);
  } else if (reg.type() == StorageType::FLOAT) {
    __ freg2mem_opt(as_FloatRegister(reg), Address(Z_SP, offset), true);
  } else {
    // stack and BAD
  }
}
92
// Restores 'reg' from the stack slot at Z_SP + 'offset'; the exact mirror
// of pd_store_reg (64-bit loads from 8-byte slots). Stack and BAD storage
// kinds are silently ignored.
void RegSpiller::pd_load_reg(MacroAssembler* masm, int offset, VMStorage reg) {
  if (reg.type() == StorageType::INTEGER) {
    __ mem2reg_opt(as_Register(reg), Address(Z_SP, offset), true);
  } else if (reg.type() == StorageType::FLOAT) {
    __ mem2freg_opt(as_FloatRegister(reg), Address(Z_SP, offset), true);
  } else {
    // stack and BAD
  }
}
102
// Converts a stack-kind VMStorage into a byte offset, adding 'stk_bias'
// to account for the difference between the caller's and callee's view of
// the frame. Must not be called with a register storage.
static int reg2offset(VMStorage vms, int stk_bias) {
  assert(!vms.is_reg(), "wrong usage");
  return vms.index_or_offset() + stk_bias;
}
107
// Emits code moving an integer value from register 'from_reg' to 'to_reg',
// which may be another integer register or a STACK/FRAME_DATA slot.
// 'out_stk_bias' is applied only to STACK destinations; FRAME_DATA offsets
// are used as-is (note the deliberate fallthrough below).
static void move_reg(MacroAssembler* masm, int out_stk_bias,
                     VMStorage from_reg, VMStorage to_reg) {
  int out_bias = 0;
  switch (to_reg.type()) {
    case StorageType::INTEGER:
      if (to_reg.segment_mask() == REG64_MASK && from_reg.segment_mask() == REG32_MASK ) {
        // Widen a 32-bit source to the 64-bit destination with a
        // sign-extending load (see CCallingConventionRequiresIntsAsLongs).
        __ z_lgfr(as_Register(to_reg), as_Register(from_reg));
      } else {
        __ lgr_if_needed(as_Register(to_reg), as_Register(from_reg));
      }
      break;
    case StorageType::STACK:
      out_bias = out_stk_bias; //fallthrough
    case StorageType::FRAME_DATA: {
      // Integer types always get a 64 bit slot in C.
      // Sign-extend 32-bit values in place before the store;
      // see CCallingConventionRequiresIntsAsLongs.
      if (from_reg.segment_mask() == REG32_MASK) {
        // see CCallingConventionRequiresIntsAsLongs
        __ z_lgfr(as_Register(from_reg), as_Register(from_reg));
      }
      // Store with the width the destination slot asks for.
      switch (to_reg.stack_size()) {
        case 8: __ reg2mem_opt(as_Register(from_reg), Address(Z_SP, reg2offset(to_reg, out_bias)), true); break;
        case 4: __ reg2mem_opt(as_Register(from_reg), Address(Z_SP, reg2offset(to_reg, out_bias)), false); break;
        default: ShouldNotReachHere();
      }
    } break;
    default: ShouldNotReachHere();
  }
}
137
// Emits code moving a floating-point value from register 'from_reg' to
// 'to_reg' (another float register or a STACK slot). The source's segment
// mask distinguishes 64-bit (double) from 32-bit (float) values; for stack
// destinations the slot size must match the value size exactly.
static void move_float(MacroAssembler* masm, int out_stk_bias,
                       VMStorage from_reg, VMStorage to_reg) {
  switch (to_reg.type()) {
    case StorageType::FLOAT:
      if (from_reg.segment_mask() == REG64_MASK)
        __ move_freg_if_needed(as_FloatRegister(to_reg), T_DOUBLE, as_FloatRegister(from_reg), T_DOUBLE);
      else
        __ move_freg_if_needed(as_FloatRegister(to_reg), T_FLOAT, as_FloatRegister(from_reg), T_FLOAT);
      break;
    case StorageType::STACK:
      if (from_reg.segment_mask() == REG64_MASK) {
        assert(to_reg.stack_size() == 8, "size should match");
        __ freg2mem_opt(as_FloatRegister(from_reg), Address(Z_SP, reg2offset(to_reg, out_stk_bias)), true);
      } else {
        assert(to_reg.stack_size() == 4, "size should match");
        __ freg2mem_opt(as_FloatRegister(from_reg), Address(Z_SP, reg2offset(to_reg, out_stk_bias)), false);
      }
      break;
    default: ShouldNotReachHere();
  }
}
159
// Emits code moving a value from a stack slot of the incoming frame
// (addressed off Z_R11, biased by 'in_stk_bias') to 'to_reg': an integer
// register, a float register, or a STACK/FRAME_DATA slot of the outgoing
// frame (addressed off Z_SP). Stack-to-stack copies bounce through
// 'tmp_reg'. As in move_reg, 'out_stk_bias' applies only to STACK
// destinations (deliberate fallthrough into FRAME_DATA).
static void move_stack(MacroAssembler* masm, Register tmp_reg, int in_stk_bias, int out_stk_bias,
                       VMStorage from_reg, VMStorage to_reg) {
  int out_bias = 0;
  Address from_addr(Z_R11, reg2offset(from_reg, in_stk_bias));
  switch (to_reg.type()) {
    case StorageType::INTEGER:
      // 4-byte sources are sign-extended into the 64-bit register.
      switch (from_reg.stack_size()) {
        case 8: __ mem2reg_opt(as_Register(to_reg), from_addr, true);break;
        case 4: __ mem2reg_signed_opt(as_Register(to_reg), from_addr);break;
        default: ShouldNotReachHere();
      }
      break;
    case StorageType::FLOAT:
      switch (from_reg.stack_size()) {
        case 8: __ mem2freg_opt(as_FloatRegister(to_reg), from_addr, true);break;
        case 4: __ mem2freg_opt(as_FloatRegister(to_reg), from_addr, false);break;
        default: ShouldNotReachHere();
      }
      break;
    case StorageType::STACK:
      out_bias = out_stk_bias; // fallthrough
    case StorageType::FRAME_DATA: {
      // Load into the temp register, widening (sign-extended) when a
      // 4-byte source feeds an 8-byte destination slot ...
      switch (from_reg.stack_size()) {
        case 8: __ mem2reg_opt(tmp_reg, from_addr, true); break;
        case 4: if (to_reg.stack_size() == 8) {
                  __ mem2reg_signed_opt(tmp_reg, from_addr);
                } else {
                  __ mem2reg_opt(tmp_reg, from_addr, false);
                }
                break;
        default: ShouldNotReachHere();
      }
      // ... then store with the destination slot's width.
      switch (to_reg.stack_size()) {
        case 8: __ reg2mem_opt(tmp_reg, Address (Z_SP, reg2offset(to_reg, out_bias)), true); break;
        case 4: __ reg2mem_opt(tmp_reg, Address (Z_SP, reg2offset(to_reg, out_bias)), false); break;
        default: ShouldNotReachHere();
      }
    } break;
    default: ShouldNotReachHere();
  }
}
201
202 void ArgumentShuffle::pd_generate(MacroAssembler* masm, VMStorage tmp, int in_stk_bias, int out_stk_bias) const {
203 Register tmp_reg = as_Register(tmp);
204 for (int i = 0; i < _moves.length(); i++) {
205 Move move = _moves.at(i);
206 VMStorage from_reg = move.from;
207 VMStorage to_reg = move.to;
208
209 switch (from_reg.type()) {
210 case StorageType::INTEGER:
211 move_reg(masm, out_stk_bias, from_reg, to_reg);
212 break;
213 case StorageType::FLOAT:
214 move_float(masm, out_stk_bias, from_reg, to_reg);
215 break;
216 case StorageType::STACK:
217 move_stack(masm, tmp_reg, in_stk_bias, out_stk_bias, from_reg, to_reg);
218 break;
219 default: ShouldNotReachHere();
220 }
221 }
222 }