src/hotspot/cpu/x86/methodHandles_x86.cpp

186 
187   if (VerifyMethodHandles && !for_compiler_entry) {
188     // make sure recv is already on stack
189     __ movptr(temp2, Address(method_temp, Method::const_offset()));
190     __ load_sized_value(temp2,
191                         Address(temp2, ConstMethod::size_of_parameters_offset()),
192                         sizeof(u2), /*is_signed*/ false);
193     // assert(sizeof(u2) == sizeof(Method::_size_of_parameters), "");
194     Label L;
195     __ cmpoop(recv, __ argument_address(temp2, -1));
196     __ jcc(Assembler::equal, L);
197     __ movptr(rax, __ argument_address(temp2, -1));
198     __ STOP("receiver not on stack");
199     __ BIND(L);
200   }
201 
202   jump_from_method_handle(_masm, method_temp, temp2, for_compiler_entry);
203   BLOCK_COMMENT("} jump_to_lambda_form");
204 }
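
The VerifyMethodHandles block above checks that the receiver really occupies the last incoming argument slot: it loads the u2 ConstMethod::_size_of_parameters and compares recv against argument_address(size_of_parameters, -1). A minimal, self-contained model of that slot arithmetic (an illustration under the usual interpreter layout, where the receiver is the deepest parameter slot; not HotSpot code):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    int main() {
      // Simulated interpreter argument area for foo(recv, a, b), indexed from
      // the stack top: the receiver is pushed first, so it is the deepest
      // slot, i.e. slot (size_of_parameters - 1).
      uintptr_t recv = 0xCAFE;
      uintptr_t args[3] = { /*b, slot 0*/ 2, /*a, slot 1*/ 1, /*recv, slot 2*/ recv };
      uint16_t size_of_parameters = 3;                   // u2, as in ConstMethod
      uintptr_t on_stack = args[size_of_parameters - 1]; // argument_address(n, -1)
      assert(on_stack == recv && "receiver not on stack"); // mirrors the STOP() path
      printf("receiver found at argument slot %d\n", size_of_parameters - 1);
      return 0;
    }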
205 
206 
207 // Code generation
208 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
209                                                                 vmIntrinsics::ID iid) {
210   const bool not_for_compiler_entry = false;  // this is the interpreter entry
211   assert(is_signature_polymorphic(iid), "expected invoke iid");
212   if (iid == vmIntrinsics::_invokeGeneric ||
213       iid == vmIntrinsics::_compiledLambdaForm) {
214     // Perhaps surprisingly, the symbolic references visible to Java are not directly used.
215     // They are linked to Java-generated adapters via MethodHandleNatives.linkMethod.
216     // They all allow an appendix argument.
217     __ hlt();           // empty stubs make SG sick
218     return NULL;
219   }
220 
221   // No interpreter entry is needed for linkToNative for now;
222   // the interpreter reaches the compiled entry through an i2c adapter.
223   if (iid == vmIntrinsics::_linkToNative) {
224     __ hlt();
225     return NULL;

297     __ push(rax_temp);          // re-push return address
298     generate_method_handle_dispatch(_masm, iid, rcx_recv, rbx_member, not_for_compiler_entry);
299   }
300 
301   return entry_point;
302 }
303 
304 void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
305                                                     vmIntrinsics::ID iid,
306                                                     Register receiver_reg,
307                                                     Register member_reg,
308                                                     bool for_compiler_entry) {
309   assert(is_signature_polymorphic(iid), "expected invoke iid");
310   Register rbx_method = rbx;   // eventual target of this invocation
311   // temps used in this code are not used in *either* compiled or interpreted calling sequences
312 #ifdef _LP64
313   Register temp1 = rscratch1;
314   Register temp2 = rscratch2;
315   Register temp3 = rax;
316   if (for_compiler_entry) {
317     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : j_rarg0), "only valid assignment");
318     assert_different_registers(temp1,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
319     assert_different_registers(temp2,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
320     assert_different_registers(temp3,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
321   }
322 #else
323   Register temp1 = (for_compiler_entry ? rsi : rdx);
324   Register temp2 = rdi;
325   Register temp3 = rax;
326   if (for_compiler_entry) {
327     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic ? noreg : rcx), "only valid assignment");
328     assert_different_registers(temp1,        rcx, rdx);
329     assert_different_registers(temp2,        rcx, rdx);
330     assert_different_registers(temp3,        rcx, rdx);
331   }
332 #endif
333   else {
334     assert_different_registers(temp1, temp2, temp3, saved_last_sp_register());  // don't trash lastSP
335   }
336   assert_different_registers(temp1, temp2, temp3, receiver_reg);
337   assert_different_registers(temp1, temp2, temp3, member_reg);
338 
339   if (iid == vmIntrinsics::_invokeBasic || iid == vmIntrinsics::_linkToNative) {
340     if (iid == vmIntrinsics::_linkToNative) {
341       assert(for_compiler_entry, "only compiler entry is supported");
342     }
343     // indirect through MH.form.vmentry.vmtarget
344     jump_to_lambda_form(_masm, receiver_reg, rbx_method, temp1, for_compiler_entry);
345 
346   } else {
347     // The method is a member invoker used by direct method handles.
348     if (VerifyMethodHandles) {
349       // make sure the trailing argument really is a MemberName (caller responsibility)
350       verify_klass(_masm, member_reg, VM_CLASS_ID(java_lang_invoke_MemberName),
351                    "MemberName required for invokeVirtual etc.");
352     }
353 
354     Address member_clazz(    member_reg, NONZERO(java_lang_invoke_MemberName::clazz_offset()));
355     Address member_vmindex(  member_reg, NONZERO(java_lang_invoke_MemberName::vmindex_offset()));
356     Address member_vmtarget( member_reg, NONZERO(java_lang_invoke_MemberName::method_offset()));
357     Address vmtarget_method( rbx_method, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset()));
358 
359     Register temp1_recv_klass = temp1;
360     if (iid != vmIntrinsics::_linkToStatic) {
361       __ verify_oop(receiver_reg);
362       if (iid == vmIntrinsics::_linkToSpecial) {
363         // Don't actually load the klass; just null-check the receiver.
364         __ null_check(receiver_reg);
365       } else {

186 
187   if (VerifyMethodHandles && !for_compiler_entry) {
188     // make sure recv is already on stack
189     __ movptr(temp2, Address(method_temp, Method::const_offset()));
190     __ load_sized_value(temp2,
191                         Address(temp2, ConstMethod::size_of_parameters_offset()),
192                         sizeof(u2), /*is_signed*/ false);
193     // assert(sizeof(u2) == sizeof(Method::_size_of_parameters), "");
194     Label L;
195     __ cmpoop(recv, __ argument_address(temp2, -1));
196     __ jcc(Assembler::equal, L);
197     __ movptr(rax, __ argument_address(temp2, -1));
198     __ STOP("receiver not on stack");
199     __ BIND(L);
200   }
201 
202   jump_from_method_handle(_masm, method_temp, temp2, for_compiler_entry);
203   BLOCK_COMMENT("} jump_to_lambda_form");
204 }
205 
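jump_to_lambda_form (ending above) reaches its target through the indirection named in generate_method_handle_dispatch's comment, "indirect through MH.form.vmentry.vmtarget": MethodHandle.form is a LambdaForm, its vmentry is a MemberName, and the MemberName's method field holds a ResolvedMethodName whose vmtarget is the Method*. A rough pointer-chase sketch with illustrative structs (field names taken from the offsets used in this file; the real objects are Java heap oops, not C++ structs):

    #include <cstdio>

    struct Method             { const char* name; };
    struct ResolvedMethodName { Method* vmtarget; };       // ResolvedMethodName::vmtarget_offset()
    struct MemberName         { ResolvedMethodName* method; }; // MemberName::method_offset()
    struct LambdaForm         { MemberName* vmentry; };
    struct MethodHandle       { LambdaForm* form; };

    // The sequence of loads the stub emits before jump_from_method_handle().
    static Method* resolve_target(MethodHandle* mh) {
      return mh->form->vmentry->method->vmtarget;
    }

    int main() {
      Method m{"invokeBasic target"};
      ResolvedMethodName rmn{&m};
      MemberName mn{&rmn};
      LambdaForm lf{&mn};
      MethodHandle mh{&lf};
      printf("target = %s\n", resolve_target(mh)->name);
      return 0;
    }
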
206 void MethodHandles::jump_to_native_invoker(MacroAssembler* _masm, Register nep_reg, Register temp_target) {
207   BLOCK_COMMENT("jump_to_native_invoker {");
208   assert_different_registers(nep_reg, temp_target);
209   assert(nep_reg != noreg, "required register");
210 
211   // Load the invoker, as NEP -> .invoker
212   __ verify_oop(nep_reg);
213   __ access_load_at(T_ADDRESS, IN_HEAP, temp_target,
214                     Address(nep_reg, NONZERO(jdk_internal_invoke_NativeEntryPoint::invoker_offset_in_bytes())),
215                     noreg, noreg);
216 
217   __ jmp(temp_target);
218   BLOCK_COMMENT("} jump_to_native_invoker");
219 }
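
The new jump_to_native_invoker above is deliberately small: one T_ADDRESS load of NativeEntryPoint.invoker out of the Java object, then a jmp rather than a call, so the invoker stub returns straight to the original caller without an extra frame. A rough model with an illustrative function-pointer field (not the real jdk.internal.invoke.NativeEntryPoint layout):

    #include <cstdio>

    struct NativeEntryPoint {
      void (*invoker)();  // raw entry address; loaded via access_load_at(T_ADDRESS, ...)
    };

    static void downcall_stub() { puts("inside the native invoker stub"); }

    // Models the generated code: load NEP.invoker, then transfer control.
    // (The stub uses jmp, a tail call; a plain call here is only for illustration.)
    static void jump_to_native_invoker(NativeEntryPoint* nep) {
      nep->invoker();
    }

    int main() {
      NativeEntryPoint nep{ downcall_stub };
      jump_to_native_invoker(&nep);
      return 0;
    }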
220 
221 
222 // Code generation
223 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
224                                                                 vmIntrinsics::ID iid) {
225   const bool not_for_compiler_entry = false;  // this is the interpreter entry
226   assert(is_signature_polymorphic(iid), "expected invoke iid");
227   if (iid == vmIntrinsics::_invokeGeneric ||
228       iid == vmIntrinsics::_compiledLambdaForm) {
229     // Perhaps surprisingly, the symbolic references visible to Java are not directly used.
230     // They are linked to Java-generated adapters via MethodHandleNatives.linkMethod.
231     // They all allow an appendix argument.
232     __ hlt();           // empty stubs make SG sick
233     return NULL;
234   }
235 
236   // No interpreter entry is needed for linkToNative for now;
237   // the interpreter reaches the compiled entry through an i2c adapter.
238   if (iid == vmIntrinsics::_linkToNative) {
239     __ hlt();
240     return NULL;

312     __ push(rax_temp);          // re-push return address
313     generate_method_handle_dispatch(_masm, iid, rcx_recv, rbx_member, not_for_compiler_entry);
314   }
315 
316   return entry_point;
317 }
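
As in the old version, generate_method_handle_interpreter_entry emits only hlt() and returns NULL for intrinsics that never need an interpreter entry: _invokeGeneric and _compiledLambdaForm are linked to Java-generated adapters by MethodHandleNatives.linkMethod, and _linkToNative reaches its compiled entry through an i2c adapter. A compilable sketch of that policy (illustrative enum, not the real vmIntrinsics IDs):

    #include <cstdio>

    enum class IID { invokeGeneric, compiledLambdaForm, linkToNative, invokeBasic };

    static bool has_interpreter_entry(IID iid) {
      switch (iid) {
        case IID::invokeGeneric:       // linked via MethodHandleNatives.linkMethod
        case IID::compiledLambdaForm:  // likewise; both take an appendix argument
        case IID::linkToNative:        // interpreter goes through an i2c adapter
          return false;                // stub body is just hlt(); entry is NULL
        default:
          return true;
      }
    }

    int main() {
      printf("linkToNative interpreter entry? %s\n",
             has_interpreter_entry(IID::linkToNative) ? "yes" : "no");
      return 0;
    }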
318 
319 void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
320                                                     vmIntrinsics::ID iid,
321                                                     Register receiver_reg,
322                                                     Register member_reg,
323                                                     bool for_compiler_entry) {
324   assert(is_signature_polymorphic(iid), "expected invoke iid");
325   Register rbx_method = rbx;   // eventual target of this invocation
326   // temps used in this code are not used in *either* compiled or interpreted calling sequences
327 #ifdef _LP64
328   Register temp1 = rscratch1;
329   Register temp2 = rscratch2;
330   Register temp3 = rax;
331   if (for_compiler_entry) {
332     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic || iid == vmIntrinsics::_linkToNative ? noreg : j_rarg0), "only valid assignment");
333     assert_different_registers(temp1,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
334     assert_different_registers(temp2,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
335     assert_different_registers(temp3,        j_rarg0, j_rarg1, j_rarg2, j_rarg3, j_rarg4, j_rarg5);
336   }
337 #else
338   Register temp1 = (for_compiler_entry ? rsi : rdx);
339   Register temp2 = rdi;
340   Register temp3 = rax;
341   if (for_compiler_entry) {
342     assert(receiver_reg == (iid == vmIntrinsics::_linkToStatic || iid == vmIntrinsics::_linkToNative ? noreg : rcx), "only valid assignment");
343     assert_different_registers(temp1,        rcx, rdx);
344     assert_different_registers(temp2,        rcx, rdx);
345     assert_different_registers(temp3,        rcx, rdx);
346   }
347 #endif
348   else {
349     assert_different_registers(temp1, temp2, temp3, saved_last_sp_register());  // don't trash lastSP
350   }
351   assert_different_registers(temp1, temp2, temp3, receiver_reg);
352   assert_different_registers(temp1, temp2, temp3, member_reg);
353 
354   if (iid == vmIntrinsics::_invokeBasic) {
355     // indirect through MH.form.vmentry.vmtarget
356     jump_to_lambda_form(_masm, receiver_reg, rbx_method, temp1, for_compiler_entry);
357   } else if (iid == vmIntrinsics::_linkToNative) {
358     assert(for_compiler_entry, "only compiler entry is supported");
359     jump_to_native_invoker(_masm, member_reg, temp1);
360   } else {
361     // The method is a member invoker used by direct method handles.
362     if (VerifyMethodHandles) {
363       // make sure the trailing argument really is a MemberName (caller responsibility)
364       verify_klass(_masm, member_reg, VM_CLASS_ID(java_lang_invoke_MemberName),
365                    "MemberName required for invokeVirtual etc.");
366     }
367 
368     Address member_clazz(    member_reg, NONZERO(java_lang_invoke_MemberName::clazz_offset()));
369     Address member_vmindex(  member_reg, NONZERO(java_lang_invoke_MemberName::vmindex_offset()));
370     Address member_vmtarget( member_reg, NONZERO(java_lang_invoke_MemberName::method_offset()));
371     Address vmtarget_method( rbx_method, NONZERO(java_lang_invoke_ResolvedMethodName::vmtarget_offset()));
372 
373     Register temp1_recv_klass = temp1;
374     if (iid != vmIntrinsics::_linkToStatic) {
375       __ verify_oop(receiver_reg);
376       if (iid == vmIntrinsics::_linkToSpecial) {
377         // Don't actually load the klass; just null-check the receiver.
378         __ null_check(receiver_reg);
379       } else {
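
The listing is cut off here; the branch that follows (for _linkToVirtual and _linkToInterface) is where temp1_recv_klass earns its name, which is why _linkToSpecial above only null-checks the receiver. A hedged sketch of the per-iid receiver handling (illustrative C++; the real code operates on registers, and the klass-load step is presumed from the elided continuation):

    #include <cstdio>

    enum class IID { linkToStatic, linkToSpecial, linkToVirtual, linkToInterface };

    static void prepare_receiver(IID iid, const void* receiver) {
      if (iid == IID::linkToStatic) return;        // no receiver argument at all
      if (receiver == nullptr) {                   // null_check(receiver_reg)
        puts("throw NullPointerException");
        return;
      }
      if (iid == IID::linkToSpecial) return;       // null check only, no klass load
      // linkToVirtual / linkToInterface: load the receiver's klass for dispatch
      puts("load receiver klass into temp1_recv_klass");
    }

    int main() {
      prepare_receiver(IID::linkToSpecial, "recv");
      prepare_receiver(IID::linkToVirtual, "recv");
      return 0;
    }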