157 ld(c_rarg3, Address(sp, arg3_sp_offset * wordSize));
158 addi(sp, sp, (arg_num + 1) * wordSize);
159 } else {
160 mv(c_rarg1, arg1);
161 mv(c_rarg2, arg2);
162 mv(c_rarg3, arg3);
163 }
164 return call_RT(oop_result, metadata_result, entry, arg_num);
165 }
166
// Distinguishes stubs that return normally to their caller
// (requires_return) from stubs that never return (does_not_return);
// StubFrame's destructor uses this to choose between emitting an
// epilogue and a should-not-reach-here trap.
enum return_state_t {
  does_not_return, requires_return
};
170
171 // Implementation of StubFrame
172
173 class StubFrame: public StackObj {
174 private:
175 StubAssembler* _sasm;
176 bool _return_state;
177
178 public:
179 StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments, return_state_t return_state=requires_return);
180 void load_argument(int offset_in_words, Register reg);
181
182 ~StubFrame();
183 };;
184
// Standard stub entry: record the stub's name/GC info, then build a frame.
void StubAssembler::prologue(const char* name, bool must_gc_arguments) {
  set_info(name, must_gc_arguments);
  enter(); // set up fp/ra frame for this stub
}
189
// Standard stub exit: tear down the frame built by prologue() and return.
void StubAssembler::epilogue() {
  leave();
  ret();
}
194
195 #define __ _sasm->
196
197 StubFrame::StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments, return_state_t return_state) {
198 _sasm = sasm;
199 _return_state = return_state;
200 __ prologue(name, must_gc_arguments);
201 }
202
// load parameters that were stored with LIR_Assembler::store_parameter
// Note: offsets for store_parameter and load_argument must match
void StubFrame::load_argument(int offset_in_words, Register reg) {
  __ load_parameter(offset_in_words, reg); // delegate to the assembler's parameter accessor
}
208
209
// Finish the stub when the frame goes out of scope: emit a normal
// epilogue for stubs that return, or a trap for stubs that must never
// fall through to this point.
StubFrame::~StubFrame() {
  if (_return_state == requires_return) {
    __ epilogue();
  } else {
    __ should_not_reach_here(); // non-returning stub: control must not get here
  }
  _sasm = nullptr;
}
218
219 #undef __
220
221
222 // Implementation of Runtime1
223
224 #define __ sasm->
225
226 // Stack layout for saving/restoring all the registers needed during a runtime
227 // call (this includes deoptimization)
// Note that users of this frame may well have arguments to some runtime
// call while these values are on the stack. These positions neglect those arguments
230 // but the code in save_live_registers will take the argument count into
231 // account.
232 //
233
// Size (in words) of the register-save area laid out by save_live_registers.
enum reg_save_layout {
  reg_save_frame_size = 32 /* float */ + 30 /* integer excluding x3, x4 */
};
// Build an OopMap describing where each register saved by
// save_live_registers lives in the frame, so the GC can find and
// update any oops held in those registers.
static OopMap* generate_oop_map(StubAssembler* sasm, bool save_fpu_registers) {
  int frame_size_in_bytes = reg_save_frame_size * BytesPerWord;
  sasm->set_frame_size(frame_size_in_bytes / BytesPerWord);
  int frame_size_in_slots = frame_size_in_bytes / sizeof(jint); // slots are jint-sized
  OopMap* oop_map = new OopMap(frame_size_in_slots, 0);
  assert_cond(oop_map != nullptr);

  // caller save registers only, see FrameMap::initialize
  // in c1_FrameMap_riscv.cpp for detail.
  const static Register caller_save_cpu_regs[FrameMap::max_nof_caller_save_cpu_regs] = {
    x7, x10, x11, x12, x13, x14, x15, x16, x17, x28, x29, x30, x31
  };

  // Record the stack slot of each saved caller-save integer register.
  for (int i = 0; i < FrameMap::max_nof_caller_save_cpu_regs; i++) {
    Register r = caller_save_cpu_regs[i];
    int sp_offset = cpu_reg_save_offsets[r->encoding()]; // precomputed in initialize_pd
    oop_map->set_callee_saved(VMRegImpl::stack2reg(sp_offset),
                              r->as_VMReg());
  }

  // fpu_regs
  if (save_fpu_registers) {
    for (int i = 0; i < FrameMap::nof_fpu_regs; i++) {
      FloatRegister r = as_FloatRegister(i);
      int sp_offset = fpu_reg_save_offsets[i]; // precomputed in initialize_pd
      oop_map->set_callee_saved(VMRegImpl::stack2reg(sp_offset),
                                r->as_VMReg());
    }
  }
  return oop_map;
}
280
281 static OopMap* save_live_registers(StubAssembler* sasm,
282 bool save_fpu_registers = true) {
283 __ block_comment("save_live_registers");
284
285 // if the number of pushed regs is odd, one slot will be reserved for alignment
286 __ push_reg(RegSet::range(x5, x31), sp); // integer registers except ra(x1) & sp(x2) & gp(x3) & tp(x4)
287
288 if (save_fpu_registers) {
337 void Runtime1::initialize_pd() {
338 int i = 0;
339 int sp_offset = 0;
340 const int step = 2; // SP offsets are in halfwords
341
342 // all float registers are saved explicitly
343 for (i = 0; i < FrameMap::nof_fpu_regs; i++) {
344 fpu_reg_save_offsets[i] = sp_offset;
345 sp_offset += step;
346 }
347
348 // a slot reserved for stack 16-byte alignment, see MacroAssembler::push_reg
349 sp_offset += step;
350 // we save x5 ~ x31, except x0 ~ x4: loop starts from x5
351 for (i = 5; i < FrameMap::nof_cpu_regs; i++) {
352 cpu_reg_save_offsets[i] = sp_offset;
353 sp_offset += step;
354 }
355 }
356
357 // target: the entry point of the method that creates and posts the exception oop
358 // has_argument: true if the exception needs arguments (passed in t0 and t1)
359
360 OopMapSet* Runtime1::generate_exception_throw(StubAssembler* sasm, address target, bool has_argument) {
361 // make a frame and preserve the caller's caller-save registers
362 OopMap* oop_map = save_live_registers(sasm);
363 assert_cond(oop_map != nullptr);
364 int call_offset = 0;
365 if (!has_argument) {
366 call_offset = __ call_RT(noreg, noreg, target);
367 } else {
368 __ mv(c_rarg1, t0);
369 __ mv(c_rarg2, t1);
370 call_offset = __ call_RT(noreg, noreg, target);
371 }
372 OopMapSet* oop_maps = new OopMapSet();
373 assert_cond(oop_maps != nullptr);
374 oop_maps->add_gc_map(call_offset, oop_map);
375
376 return oop_maps;
862 __ check_klass_subtype_slow_path(x14, x10, x12, x15, nullptr, &miss);
863
864 // fallthrough on success:
865 __ mv(t0, 1);
866 __ sd(t0, Address(sp, (result_off) * VMRegImpl::stack_slot_size)); // result
867 __ pop_reg(RegSet::of(x10, x12, x14, x15), sp);
868 __ ret();
869
870 __ bind(miss);
871 __ sd(zr, Address(sp, (result_off) * VMRegImpl::stack_slot_size)); // result
872 __ pop_reg(RegSet::of(x10, x12, x14, x15), sp);
873 __ ret();
874 }
875 break;
876
877 case C1StubId::monitorenter_nofpu_id:
878 save_fpu_registers = false;
879 // fall through
880 case C1StubId::monitorenter_id:
881 {
882 StubFrame f(sasm, "monitorenter", dont_gc_arguments);
883 OopMap* map = save_live_registers(sasm, save_fpu_registers);
884 assert_cond(map != nullptr);
885
886 // Called with store_parameter and not C abi
887 f.load_argument(1, x10); // x10: object
888 f.load_argument(0, x11); // x11: lock address
889
890 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, monitorenter), x10, x11);
891
892 oop_maps = new OopMapSet();
893 assert_cond(oop_maps != nullptr);
894 oop_maps->add_gc_map(call_offset, map);
895 restore_live_registers(sasm, save_fpu_registers);
896 }
897 break;
898
899 case C1StubId::monitorexit_nofpu_id:
900 save_fpu_registers = false;
901 // fall through
902 case C1StubId::monitorexit_id:
|
157 ld(c_rarg3, Address(sp, arg3_sp_offset * wordSize));
158 addi(sp, sp, (arg_num + 1) * wordSize);
159 } else {
160 mv(c_rarg1, arg1);
161 mv(c_rarg2, arg2);
162 mv(c_rarg3, arg3);
163 }
164 return call_RT(oop_result, metadata_result, entry, arg_num);
165 }
166
// Distinguishes stubs that are expected to return normally to their
// caller (requires_return) from stubs that never return
// (does_not_return); stored in StubFrame::_return_state.
enum return_state_t {
  does_not_return, requires_return
};
170
171 // Implementation of StubFrame
172
173 class StubFrame: public StackObj {
174 private:
175 StubAssembler* _sasm;
176 bool _return_state;
177 bool _use_pop_on_epilogue;
178
179 public:
180 StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments,
181 return_state_t return_state, bool use_pop_on_epilogue);
182
183 public:
184 StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments, bool use_pop_on_epilogue);
185 StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments, return_state_t return_state);
186 StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments);
187
188 ~StubFrame();
189
190 void load_argument(int offset_in_words, Register reg);
191 };;
192
// Standard stub entry: record the stub's name/GC info, then build a frame.
void StubAssembler::prologue(const char* name, bool must_gc_arguments) {
  set_info(name, must_gc_arguments);
  enter(); // set up fp/ra frame for this stub
}
197
// Standard stub exit: tear down the frame and return to the caller.
// use_pop selects an explicit restore sequence for frames that may have
// been frozen (fp may be stale), instead of a plain leave().
void StubAssembler::epilogue(bool use_pop) {
  // Avoid using a leave instruction when this frame may
  // have been frozen, since the current value of fp
  // restored from the stub would be invalid. We still
  // must restore the fp value saved on enter though.
  if (use_pop) {
    ld(fp, Address(sp));           // reload saved fp from [sp]
    ld(ra, Address(sp, wordSize)); // reload saved ra from [sp + wordSize]
    addi(sp, sp, 2 * wordSize);    // pop both saved slots
  } else {
    leave();
  }
  ret();
}
212
213 #define __ _sasm->
214
// Master constructor: record the assembler and epilogue/return policy,
// then emit the stub prologue.
StubFrame::StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments,
                     return_state_t return_state, bool use_pop_on_epilogue)
  : _sasm(sasm), _return_state(return_state), _use_pop_on_epilogue(use_pop_on_epilogue) {
  __ prologue(name, must_gc_arguments);
}
220
// Convenience constructor: returning stub with an explicit epilogue policy.
StubFrame::StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments,
                     bool use_pop_on_epilogue) :
  StubFrame(sasm, name, must_gc_arguments, requires_return, use_pop_on_epilogue) {}
224
// Convenience constructor: explicit return state, standard leave()-based epilogue.
StubFrame::StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments,
                     return_state_t return_state) :
  StubFrame(sasm, name, must_gc_arguments, return_state, /*use_pop_on_epilogue*/false) {}
228
// Convenience constructor: returning stub with the standard leave()-based epilogue.
StubFrame::StubFrame(StubAssembler* sasm, const char* name, bool must_gc_arguments) :
  StubFrame(sasm, name, must_gc_arguments, requires_return, /*use_pop_on_epilogue*/false) {}
231
// load parameters that were stored with LIR_Assembler::store_parameter
// Note: offsets for store_parameter and load_argument must match
void StubFrame::load_argument(int offset_in_words, Register reg) {
  __ load_parameter(offset_in_words, reg); // delegate to the assembler's parameter accessor
}
237
// Emit the stub epilogue when the frame goes out of scope.
// NOTE(review): _return_state is stored by the constructors but not
// consulted here, so does_not_return stubs also get a normal epilogue —
// confirm this is intended.
StubFrame::~StubFrame() {
  __ epilogue(_use_pop_on_epilogue);
}
241
242 #undef __
243
244
245 // Implementation of Runtime1
246
247 #define __ sasm->
248
249 // Stack layout for saving/restoring all the registers needed during a runtime
250 // call (this includes deoptimization)
// Note that users of this frame may well have arguments to some runtime
// call while these values are on the stack. These positions neglect those arguments
253 // but the code in save_live_registers will take the argument count into
254 // account.
255 //
256
// Size (in words) of the register-save area laid out by save_live_registers.
enum reg_save_layout {
  reg_save_frame_size = 32 /* float */ + 30 /* integer excluding x3, x4 */
};
// Build an OopMap describing where each register saved by
// save_live_registers lives in the frame, so the GC can find and
// update any oops held in those registers.
static OopMap* generate_oop_map(StubAssembler* sasm, bool save_fpu_registers) {
  int frame_size_in_bytes = reg_save_frame_size * BytesPerWord;
  sasm->set_frame_size(frame_size_in_bytes / BytesPerWord);
  int frame_size_in_slots = frame_size_in_bytes / sizeof(jint); // slots are jint-sized
  OopMap* oop_map = new OopMap(frame_size_in_slots, 0);
  assert_cond(oop_map != nullptr);

  // caller save registers only, see FrameMap::initialize
  // in c1_FrameMap_riscv.cpp for detail.
  const static Register caller_save_cpu_regs[FrameMap::max_nof_caller_save_cpu_regs] = {
    x7, x10, x11, x12, x13, x14, x15, x16, x17, x28, x29, x30, x31
  };

  // Record the stack slot of each saved caller-save integer register.
  for (int i = 0; i < FrameMap::max_nof_caller_save_cpu_regs; i++) {
    Register r = caller_save_cpu_regs[i];
    int sp_offset = cpu_reg_save_offsets[r->encoding()]; // precomputed in initialize_pd
    oop_map->set_callee_saved(VMRegImpl::stack2reg(sp_offset),
                              r->as_VMReg());
  }

  // Also record xthread's slot; runtime_blob_current_thread_offset
  // reads the same cpu_reg_save_offsets entry to locate the saved
  // thread register in this frame.
  int sp_offset = cpu_reg_save_offsets[xthread->encoding()];
  oop_map->set_callee_saved(VMRegImpl::stack2reg(sp_offset),
                            xthread->as_VMReg());

  // fpu_regs
  if (save_fpu_registers) {
    for (int i = 0; i < FrameMap::nof_fpu_regs; i++) {
      FloatRegister r = as_FloatRegister(i);
      int sp_offset = fpu_reg_save_offsets[i]; // precomputed in initialize_pd
      oop_map->set_callee_saved(VMRegImpl::stack2reg(sp_offset),
                                r->as_VMReg());
    }
  }
  return oop_map;
}
307
308 static OopMap* save_live_registers(StubAssembler* sasm,
309 bool save_fpu_registers = true) {
310 __ block_comment("save_live_registers");
311
312 // if the number of pushed regs is odd, one slot will be reserved for alignment
313 __ push_reg(RegSet::range(x5, x31), sp); // integer registers except ra(x1) & sp(x2) & gp(x3) & tp(x4)
314
315 if (save_fpu_registers) {
364 void Runtime1::initialize_pd() {
365 int i = 0;
366 int sp_offset = 0;
367 const int step = 2; // SP offsets are in halfwords
368
369 // all float registers are saved explicitly
370 for (i = 0; i < FrameMap::nof_fpu_regs; i++) {
371 fpu_reg_save_offsets[i] = sp_offset;
372 sp_offset += step;
373 }
374
375 // a slot reserved for stack 16-byte alignment, see MacroAssembler::push_reg
376 sp_offset += step;
377 // we save x5 ~ x31, except x0 ~ x4: loop starts from x5
378 for (i = 5; i < FrameMap::nof_cpu_regs; i++) {
379 cpu_reg_save_offsets[i] = sp_offset;
380 sp_offset += step;
381 }
382 }
383
// return: offset in 64-bit words.
// Locates the saved xthread register inside a monitorenter runtime stub
// frame, using the same cpu_reg_save_offsets table filled by initialize_pd.
uint Runtime1::runtime_blob_current_thread_offset(frame f) {
  CodeBlob* cb = f.cb();
  // Only the monitorenter stubs lay their frame out this way.
  assert(cb == Runtime1::blob_for(C1StubId::monitorenter_id) ||
         cb == Runtime1::blob_for(C1StubId::monitorenter_nofpu_id), "must be");
  assert(cb != nullptr && cb->is_runtime_stub(), "invalid frame");
  int offset = cpu_reg_save_offsets[xthread->encoding()];
  return offset / 2; // SP offsets are in halfwords
}
393
394 // target: the entry point of the method that creates and posts the exception oop
395 // has_argument: true if the exception needs arguments (passed in t0 and t1)
396
397 OopMapSet* Runtime1::generate_exception_throw(StubAssembler* sasm, address target, bool has_argument) {
398 // make a frame and preserve the caller's caller-save registers
399 OopMap* oop_map = save_live_registers(sasm);
400 assert_cond(oop_map != nullptr);
401 int call_offset = 0;
402 if (!has_argument) {
403 call_offset = __ call_RT(noreg, noreg, target);
404 } else {
405 __ mv(c_rarg1, t0);
406 __ mv(c_rarg2, t1);
407 call_offset = __ call_RT(noreg, noreg, target);
408 }
409 OopMapSet* oop_maps = new OopMapSet();
410 assert_cond(oop_maps != nullptr);
411 oop_maps->add_gc_map(call_offset, oop_map);
412
413 return oop_maps;
899 __ check_klass_subtype_slow_path(x14, x10, x12, x15, nullptr, &miss);
900
901 // fallthrough on success:
902 __ mv(t0, 1);
903 __ sd(t0, Address(sp, (result_off) * VMRegImpl::stack_slot_size)); // result
904 __ pop_reg(RegSet::of(x10, x12, x14, x15), sp);
905 __ ret();
906
907 __ bind(miss);
908 __ sd(zr, Address(sp, (result_off) * VMRegImpl::stack_slot_size)); // result
909 __ pop_reg(RegSet::of(x10, x12, x14, x15), sp);
910 __ ret();
911 }
912 break;
913
914 case C1StubId::monitorenter_nofpu_id:
915 save_fpu_registers = false;
916 // fall through
917 case C1StubId::monitorenter_id:
918 {
919 StubFrame f(sasm, "monitorenter", dont_gc_arguments, /*use_pop_on_epilogue*/true);
920 OopMap* map = save_live_registers(sasm, save_fpu_registers);
921 assert_cond(map != nullptr);
922
923 // Called with store_parameter and not C abi
924 f.load_argument(1, x10); // x10: object
925 f.load_argument(0, x11); // x11: lock address
926
927 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, monitorenter), x10, x11);
928
929 oop_maps = new OopMapSet();
930 assert_cond(oop_maps != nullptr);
931 oop_maps->add_gc_map(call_offset, map);
932 restore_live_registers(sasm, save_fpu_registers);
933 }
934 break;
935
936 case C1StubId::monitorexit_nofpu_id:
937 save_fpu_registers = false;
938 // fall through
939 case C1StubId::monitorexit_id:
|