/*
 * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2014, Red Hat Inc. All rights reserved.
 * Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "classfile/vmClasses.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "memory/allocation.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/flags/flagSetting.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/stubRoutines.hpp"

#define __ _masm->

#ifdef PRODUCT
#define BLOCK_COMMENT(str) /* nothing */
#else
#define BLOCK_COMMENT(str) __ block_comment(str)
#endif

#define BIND(label) bind(label); BLOCK_COMMENT(#label ":")

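// Extract the Klass* from a java.lang.Class mirror held in klass_reg
// (in place), optionally verifying first that the oop really is a Class.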
void MethodHandles::load_klass_from_Class(MacroAssembler* _masm, Register klass_reg) {
  assert_cond(_masm != NULL);
  if (VerifyMethodHandles) {
    verify_klass(_masm, klass_reg, VM_CLASS_ID(java_lang_Class),
                 "MH argument is a Class");
  }
  __ ld(klass_reg, Address(klass_reg, java_lang_Class::klass_offset()));
}

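// NONZERO() is used below when baking field offsets into generated code:
// in debug builds it asserts that the offset has actually been initialized
// (i.e. is not zero); in product builds it expands to the value unchanged.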
#ifdef ASSERT
static int check_nonzero(const char* xname, int x) {
  assert(x != 0, "%s should be nonzero", xname);
  return x;
}
#define NONZERO(x) check_nonzero(#x, x)
#else //ASSERT
#define NONZERO(x) (x)
#endif //ASSERT

#ifdef ASSERT
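// Debug-only check that obj is a non-null instance of the VM class named by
// klass_id: compare the object's klass, and the entry at that klass's
// super_check_offset, against the expected klass; stop with error_message
// on mismatch.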
void MethodHandles::verify_klass(MacroAssembler* _masm,
                                 Register obj, vmClassID klass_id,
                                 const char* error_message) {
  assert_cond(_masm != NULL);
  InstanceKlass** klass_addr = vmClasses::klass_addr_at(klass_id);
  Klass* klass = vmClasses::klass_at(klass_id);
  Register temp1 = t1;
  Register temp2 = t0; // used by MacroAssembler::cmpptr
  Label L_ok, L_bad;
  BLOCK_COMMENT("verify_klass {");
  __ verify_oop(obj);
  __ beqz(obj, L_bad);
  __ push_reg(RegSet::of(temp1, temp2), sp);
  __ load_klass(temp1, obj, temp2);
  __ cmpptr(temp1, ExternalAddress((address) klass_addr), L_ok);
  intptr_t super_check_offset = klass->super_check_offset();
  __ ld(temp1, Address(temp1, super_check_offset));
  __ cmpptr(temp1, ExternalAddress((address) klass_addr), L_ok);
  __ pop_reg(RegSet::of(temp1, temp2), sp);
  __ bind(L_bad);
  __ stop(error_message);
  __ BIND(L_ok);
  __ pop_reg(RegSet::of(temp1, temp2), sp);
  BLOCK_COMMENT("} verify_klass");
}

void MethodHandles::verify_ref_kind(MacroAssembler* _masm, int ref_kind, Register member_reg, Register temp) {}

#endif //ASSERT

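// Tail-jump to the target Method*'s entry point.  For interpreted callers,
// honor interp_only_mode (so JVMTI events such as single-stepping still see
// the call); a null Method* is dispatched to the AbstractMethodError stub.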
void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register temp,
                                            bool for_compiler_entry) {
  assert_cond(_masm != NULL);
  assert(method == xmethod, "interpreter calling convention");
  Label L_no_such_method;
  __ beqz(xmethod, L_no_such_method);
  __ verify_method_ptr(method);

  if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
    Label run_compiled_code;
    // JVMTI events, such as single-stepping, are implemented partly by avoiding running
    // compiled code in threads for which the event is enabled.  Check here for
    // interp_only_mode if these events CAN be enabled.

    __ lwu(t0, Address(xthread, JavaThread::interp_only_mode_offset()));
    __ beqz(t0, run_compiled_code);
    __ ld(t0, Address(method, Method::interpreter_entry_offset()));
    __ jr(t0);
    __ BIND(run_compiled_code);
  }

  const ByteSize entry_offset = for_compiler_entry ? Method::from_compiled_offset() :
                                                     Method::from_interpreted_offset();
  __ ld(t0, Address(method, entry_offset));
  __ jr(t0);
  __ bind(L_no_such_method);
  __ far_jump(RuntimeAddress(StubRoutines::throw_AbstractMethodError_entry()));
}

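// Entry path used for invokeBasic and linkToNative dispatch: the target
// Method* is reached through MH.form.vmentry.method.vmtarget (see the
// load_heap_oop chain below) and then entered via jump_from_method_handle.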
void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
                                        Register recv, Register method_temp,
                                        Register temp2,
                                        bool for_compiler_entry) {
  assert_cond(_masm != NULL);
  BLOCK_COMMENT("jump_to_lambda_form {");
  // This is the initial entry point of a lazy method handle.
  // After type checking, it picks up the invoker from the LambdaForm.
  assert_different_registers(recv, method_temp, temp2);
  assert(recv != noreg, "required register");
  assert(method_temp == xmethod, "required register for loading method");

  // Load the invoker, as MH -> MH.form -> LF.vmentry
  __ verify_oop(recv);
  __ load_heap_oop(method_temp, Address(recv, NONZERO(java_lang_invoke_MethodHandle::form_offset())), temp2);
  __ verify_oop(method_temp);
  __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset())), temp2);
  __ verify_oop(method_temp);
  __ load_heap_oop(method_temp, Address(method_temp, NONZERO(java_lang_invoke_MemberName::method_offset())), temp2);
  __ verify_oop(method_temp);
  __ access_load_at(T_ADDRESS, IN_HEAP, method_temp, Address(method_temp, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset())), noreg, noreg);

  if (VerifyMethodHandles && !for_compiler_entry) {
    // make sure recv is already on stack
    __ ld(temp2, Address(method_temp, Method::const_offset()));
    __ load_sized_value(temp2,
                        Address(temp2, ConstMethod::size_of_parameters_offset()),
                        sizeof(u2), /*is_signed*/ false);
    Label L;
    __ ld(t0, __ argument_address(temp2, -1));
    __ beq(recv, t0, L);
    __ ld(x10, __ argument_address(temp2, -1));
    __ ebreak();
    __ BIND(L);
  }

  jump_from_method_handle(_masm, method_temp, temp2, for_compiler_entry);
  BLOCK_COMMENT("} jump_to_lambda_form");
}

// Code generation
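// Generate the interpreter entry stub for one signature-polymorphic
// intrinsic (invokeBasic or one of the linkTo* methods).  The generic
// invokers and linkToNative have no interpreter entry of their own and
// get a breakpoint stub instead.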
address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
                                                                vmIntrinsics::ID iid) {
  assert_cond(_masm != NULL);
  const bool not_for_compiler_entry = false;  // this is the interpreter entry
  assert(is_signature_polymorphic(iid), "expected invoke iid");
  if (iid == vmIntrinsics::_invokeGeneric ||
      iid == vmIntrinsics::_compiledLambdaForm) {
    // Perhaps surprisingly, the symbolic references visible to Java are not directly used.
    // They are linked to Java-generated adapters via MethodHandleNatives.linkMethod.
    // They all allow an appendix argument.
    __ ebreak();           // empty stubs make SG sick
    return NULL;
  }

  // No need in interpreter entry for linkToNative for now.
  // Interpreter calls compiled entry through i2c.
  if (iid == vmIntrinsics::_linkToNative) {
    __ ebreak();
    return NULL;
  }

  // x30: sender SP (must preserve; see prepare_to_jump_from_interpreted)
  // xmethod: Method*
  // x13: argument locator (parameter slot count, added to sp)
  // x11: used as temp to hold mh or receiver
  // x10, x29: garbage temps, blown away
  Register argp   = x13;   // argument list ptr, live on error paths
  Register mh     = x11;   // MH receiver; dies quickly and is recycled

  // here's where control starts out:
  __ align(CodeEntryAlignment);
  address entry_point = __ pc();

  if (VerifyMethodHandles) {
    assert(Method::intrinsic_id_size_in_bytes() == 2, "assuming Method::_intrinsic_id is u2");

    Label L;
    BLOCK_COMMENT("verify_intrinsic_id {");
    __ lhu(t0, Address(xmethod, Method::intrinsic_id_offset_in_bytes()));
    __ mv(t1, (int) iid);
    __ beq(t0, t1, L);
    if (iid == vmIntrinsics::_linkToVirtual ||
        iid == vmIntrinsics::_linkToSpecial) {
      // could do this for all kinds, but would explode assembly code size
      trace_method_handle(_masm, "bad Method*::intrinsic_id");
    }
    __ ebreak();
    __ bind(L);
    BLOCK_COMMENT("} verify_intrinsic_id");
  }

  // First task:  Find out how big the argument list is.
  Address x13_first_arg_addr;
  int ref_kind = signature_polymorphic_intrinsic_ref_kind(iid);
  assert(ref_kind != 0 || iid == vmIntrinsics::_invokeBasic, "must be _invokeBasic or a linkTo intrinsic");
  if (ref_kind == 0 || MethodHandles::ref_kind_has_receiver(ref_kind)) {
    __ ld(argp, Address(xmethod, Method::const_offset()));
    __ load_sized_value(argp,
                        Address(argp, ConstMethod::size_of_parameters_offset()),
                        sizeof(u2), /*is_signed*/ false);
    x13_first_arg_addr = __ argument_address(argp, -1);
  } else {
    DEBUG_ONLY(argp = noreg);
  }

  if (!is_signature_polymorphic_static(iid)) {
    __ ld(mh, x13_first_arg_addr);
    DEBUG_ONLY(argp = noreg);
  }

  // x13_first_arg_addr is live!

  trace_method_handle_interpreter_entry(_masm, iid);
  if (iid == vmIntrinsics::_invokeBasic) {
    generate_method_handle_dispatch(_masm, iid, mh, noreg, not_for_compiler_entry);
  } else {
    // Adjust argument list by popping the trailing MemberName argument.
    Register recv = noreg;
    if (MethodHandles::ref_kind_has_receiver(ref_kind)) {
      // Load the receiver (not the MH; the actual MemberName's receiver) up from the interpreter stack.
      __ ld(recv = x12, x13_first_arg_addr);
    }
    DEBUG_ONLY(argp = noreg);
    Register xmember = xmethod;  // MemberName ptr; incoming method ptr is dead now
    __ pop_reg(xmember);             // extract last argument
    generate_method_handle_dispatch(_masm, iid, recv, xmember, not_for_compiler_entry);
  }

  return entry_point;
}


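// Emit the dispatch code shared by the interpreter entry above and by
// compiled entry points (for_compiler_entry == true): for invokeBasic and
// linkToNative go through the LambdaForm, otherwise resolve the trailing
// MemberName into a concrete Method* (xmethod) and jump to it.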
void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
                                                    vmIntrinsics::ID iid,
                                                    Register receiver_reg,
                                                    Register member_reg,
                                                    bool for_compiler_entry) {
  assert_cond(_masm != NULL);
  assert(is_signature_polymorphic(iid), "expected invoke iid");
  // temps used in this code are not used in *either* compiled or interpreted calling sequences
  Register temp1 = x7;
  Register temp2 = x28;
  Register temp3 = x29;  // x30 is live by this point: it contains the sender SP
  if (for_compiler_entry) {
    assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : j_rarg0), "only valid assignment");
    assert_different_registers(temp1, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5, j_rarg6, j_rarg7);
    assert_different_registers(temp2, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5, j_rarg6, j_rarg7);
    assert_different_registers(temp3, j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5, j_rarg6, j_rarg7);
  }

  assert_different_registers(temp1, temp2, temp3, receiver_reg);
  assert_different_registers(temp1, temp2, temp3, member_reg);

  if (iid == vmIntrinsics::_invokeBasic || iid == vmIntrinsics::_linkToNative) {
    if (iid == vmIntrinsics::_linkToNative) {
      assert(for_compiler_entry, "only compiler entry is supported");
    }
    // indirect through MH.form.vmentry.vmtarget
    jump_to_lambda_form(_masm, receiver_reg, xmethod, temp1, for_compiler_entry);
  } else {
    // The method is a member invoker used by direct method handles.
    if (VerifyMethodHandles) {
      // make sure the trailing argument really is a MemberName (caller responsibility)
      verify_klass(_masm, member_reg, VM_CLASS_ID(java_lang_invoke_MemberName),
                   "MemberName required for invokeVirtual etc.");
    }

    Address member_clazz(    member_reg, NONZERO(java_lang_invoke_MemberName::clazz_offset()));
    Address member_vmindex(  member_reg, NONZERO(java_lang_invoke_MemberName::vmindex_offset()));
    Address member_vmtarget( member_reg, NONZERO(java_lang_invoke_MemberName::method_offset()));
    Address vmtarget_method( xmethod, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset()));
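    // The addresses above name the fields of the trailing MemberName argument
    // and of the ResolvedMethodName object that MemberName.method points to.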

    Register temp1_recv_klass = temp1;
    if (iid != vmIntrinsics::_linkToStatic) {
      __ verify_oop(receiver_reg);
      if (iid == vmIntrinsics::_linkToSpecial) {
        // Don't actually load the klass; just null-check the receiver.
        __ null_check(receiver_reg);
      } else {
        // load receiver klass itself
        __ null_check(receiver_reg, oopDesc::klass_offset_in_bytes());
        __ load_klass(temp1_recv_klass, receiver_reg);
        __ verify_klass_ptr(temp1_recv_klass);
      }
      BLOCK_COMMENT("check_receiver {");
      // The receiver for the MemberName must be in receiver_reg.
      // Check the receiver against the MemberName.clazz
      if (VerifyMethodHandles && iid == vmIntrinsics::_linkToSpecial) {
        // Did not load it above...
        __ load_klass(temp1_recv_klass, receiver_reg);
        __ verify_klass_ptr(temp1_recv_klass);
      }
      if (VerifyMethodHandles && iid != vmIntrinsics::_linkToInterface) {
        Label L_ok;
        Register temp2_defc = temp2;
        __ load_heap_oop(temp2_defc, member_clazz, temp3);
        load_klass_from_Class(_masm, temp2_defc);
        __ verify_klass_ptr(temp2_defc);
        __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, L_ok);
        // If we get here, the type check failed!
        __ ebreak();
        __ bind(L_ok);
      }
      BLOCK_COMMENT("} check_receiver");
    }
    if (iid == vmIntrinsics::_linkToSpecial ||
        iid == vmIntrinsics::_linkToStatic) {
      DEBUG_ONLY(temp1_recv_klass = noreg);  // these guys didn't load the recv_klass
    }

    // Live registers at this point:
    //  member_reg - MemberName that was the trailing argument
    //  temp1_recv_klass - klass of stacked receiver, if needed
    //  x30 - interpreter linkage (if interpreted)
    //  x11 ... x10 - compiler arguments (if compiled)

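    // Resolve the MemberName into a concrete Method* in xmethod:
    // linkToSpecial/linkToStatic read it straight from MemberName.method.vmtarget,
    // linkToVirtual/linkToInterface use MemberName.vmindex for a vtable/itable lookup.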
    Label L_incompatible_class_change_error;
    switch (iid) {
      case vmIntrinsics::_linkToSpecial:
        if (VerifyMethodHandles) {
          verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp3);
        }
        __ load_heap_oop(xmethod, member_vmtarget);
        __ access_load_at(T_ADDRESS, IN_HEAP, xmethod, vmtarget_method, noreg, noreg);
        break;

      case vmIntrinsics::_linkToStatic:
        if (VerifyMethodHandles) {
          verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp3);
        }
        __ load_heap_oop(xmethod, member_vmtarget);
        __ access_load_at(T_ADDRESS, IN_HEAP, xmethod, vmtarget_method, noreg, noreg);
        break;

      case vmIntrinsics::_linkToVirtual:
      {
        // same as TemplateTable::invokevirtual,
        // minus the CP setup and profiling:

        if (VerifyMethodHandles) {
          verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp3);
        }

        // pick out the vtable index from the MemberName, and then we can discard it:
        Register temp2_index = temp2;
        __ access_load_at(T_ADDRESS, IN_HEAP, temp2_index, member_vmindex, noreg, noreg);

        if (VerifyMethodHandles) {
          Label L_index_ok;
          __ bgez(temp2_index, L_index_ok);
          __ ebreak();
          __ BIND(L_index_ok);
        }

        // Note:  The verifier invariants allow us to ignore MemberName.clazz and vmtarget
        // at this point.  And VerifyMethodHandles has already checked clazz, if needed.

        // get target Method* & entry point
        __ lookup_virtual_method(temp1_recv_klass, temp2_index, xmethod);
        break;
      }

      case vmIntrinsics::_linkToInterface:
      {
        // same as TemplateTable::invokeinterface
        // (minus the CP setup and profiling, with different argument motion)
        if (VerifyMethodHandles) {
          verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp3);
        }

        Register temp3_intf = temp3;
        __ load_heap_oop(temp3_intf, member_clazz);
        load_klass_from_Class(_masm, temp3_intf);
        __ verify_klass_ptr(temp3_intf);

        Register rindex = xmethod;
        __ access_load_at(T_ADDRESS, IN_HEAP, rindex, member_vmindex, noreg, noreg);
        if (VerifyMethodHandles) {
          Label L;
          __ bgez(rindex, L);
          __ ebreak();
          __ bind(L);
        }

        // given intf, index, and recv klass, dispatch to the implementation method
        __ lookup_interface_method(temp1_recv_klass, temp3_intf,
                                   // note: next two args must be the same:
                                   rindex, xmethod,
                                   temp2,
                                   L_incompatible_class_change_error);
        break;
      }

      default:
        fatal("unexpected intrinsic %d: %s", vmIntrinsics::as_int(iid), vmIntrinsics::name_at(iid));
        break;
    }

    // live at this point:  xmethod, x30 (if interpreted)

    // After figuring out which concrete method to call, jump into it.
    // Note that this works in the interpreter with no data motion.
    // But the compiled version will require that x12_recv be shifted out.
    __ verify_method_ptr(xmethod);
    jump_from_method_handle(_masm, xmethod, temp1, for_compiler_entry);
    if (iid == vmIntrinsics::_linkToInterface) {
      __ bind(L_incompatible_class_change_error);
      __ far_jump(RuntimeAddress(StubRoutines::throw_IncompatibleClassChangeError_entry()));
    }
  }

}

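// Tracing support.  The stubs below are left empty on this port (tracing of
// method handle adapters is not implemented here); they are only present so
// that the shared MethodHandles code has something to call.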
#ifndef PRODUCT
void trace_method_handle_stub(const char* adaptername,
                              oopDesc* mh,
                              intptr_t* saved_regs,
                              intptr_t* entry_sp) {  }

// The stub wraps the arguments in a struct on the stack to avoid
// dealing with the different calling conventions for passing 6
// arguments.
struct MethodHandleStubArguments {
  const char* adaptername;
  oopDesc* mh;
  intptr_t* saved_regs;
  intptr_t* entry_sp;
};
void trace_method_handle_stub_wrapper(MethodHandleStubArguments* args) {  }

void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {  }
#endif //PRODUCT