446 // created. Its entire job is simply to free this storage.
447 static void OSR_migration_end(intptr_t* buf);
448
449 // Convert a sig into a calling convention register layout
450 // and find interesting things about it.
451 static VMRegPair* find_callee_arguments(Symbol* sig, bool has_receiver, bool has_appendix, int *arg_size);
452 static VMReg name_for_receiver();
453
454 // "Top of Stack" slots that may be unused by the calling convention but must
455 // otherwise be preserved.
456 // On Intel these are not necessary and the value can be zero.
457 // On Sparc this describes the words reserved for storing a register window
458 // when an interrupt occurs.
459 static uint out_preserve_stack_slots();
460
461 // Stack slots that may be unused by the calling convention but must
462 // otherwise be preserved. On Intel this includes the return address.
463 // On PowerPC it includes the 4 words holding the old TOC & LR glue.
464 static uint in_preserve_stack_slots();
465
466 // Is vector's size (in bytes) bigger than a size saved by default?
467 // For example, on x86 16 bytes XMM registers are saved by default.
468 static bool is_wide_vector(int size);
469
470 // Save and restore a native result
471 static void save_native_result(MacroAssembler *_masm, BasicType ret_type, int frame_slots);
472 static void restore_native_result(MacroAssembler *_masm, BasicType ret_type, int frame_slots);
473
474 // Generate a native wrapper for a given method. The method takes arguments
475 // in the Java compiled code convention, marshals them to the native
476 // convention (handlizes oops, etc), transitions to native, makes the call,
477 // returns to java state (possibly blocking), unhandlizes any result and
478 // returns.
479 //
480 // The wrapper may contain special-case code if the given method
481 // is a compiled method handle adapter, such as _invokeBasic, _linkToVirtual, etc.
482 static nmethod* generate_native_wrapper(MacroAssembler* masm,
483 const methodHandle& method,
484 int compile_id,
485 BasicType* sig_bt,
486 VMRegPair* regs,
487 BasicType ret_type);
488
489 // A compiled caller has just called the interpreter, but compiled code
490 // exists. Patch the caller so it no longer calls into the interpreter.
491 static void fixup_callers_callsite(Method* moop, address ret_pc);
492 static bool should_fixup_call_destination(address destination, address entry_point, address caller_pc, Method* moop, CodeBlob* cb);
493
494 // Slow-path Locking and Unlocking
495 static void complete_monitor_locking_C(oopDesc* obj, BasicLock* lock, JavaThread* current);
496 static void complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current);
497
498 // Resolving of calls
499 static address resolve_static_call_C (JavaThread* current);
500 static address resolve_virtual_call_C (JavaThread* current);
501 static address resolve_opt_virtual_call_C(JavaThread* current);
502
503 // arraycopy, the non-leaf version. (See StubRoutines for all the leaf calls.)
504 static void slow_arraycopy_C(oopDesc* src, jint src_pos,
505 oopDesc* dest, jint dest_pos,
506 jint length, JavaThread* thread);
507
508 // handle ic miss with caller being compiled code
509 // wrong method handling (inline cache misses)
510 static address handle_wrong_method(JavaThread* current);
511 static address handle_wrong_method_abstract(JavaThread* current);
512 static address handle_wrong_method_ic_miss(JavaThread* current);
513
514 static address handle_unsafe_access(JavaThread* thread, address next_pc);
515
516 #ifndef PRODUCT
|
446 // created. Its entire job is simply to free this storage.
447 static void OSR_migration_end(intptr_t* buf);
448
449 // Convert a sig into a calling convention register layout
450 // and find interesting things about it.
451 static VMRegPair* find_callee_arguments(Symbol* sig, bool has_receiver, bool has_appendix, int *arg_size);
452 static VMReg name_for_receiver();
453
454 // "Top of Stack" slots that may be unused by the calling convention but must
455 // otherwise be preserved.
456 // On Intel these are not necessary and the value can be zero.
457 // On Sparc this describes the words reserved for storing a register window
458 // when an interrupt occurs.
459 static uint out_preserve_stack_slots();
460
461 // Stack slots that may be unused by the calling convention but must
462 // otherwise be preserved. On Intel this includes the return address.
463 // On PowerPC it includes the 4 words holding the old TOC & LR glue.
464 static uint in_preserve_stack_slots();
465
466 static VMReg thread_register();
467
468 static void continuation_enter_cleanup(MacroAssembler* masm);
469
470 // Is vector's size (in bytes) bigger than a size saved by default?
471 // For example, on x86 16 bytes XMM registers are saved by default.
472 static bool is_wide_vector(int size);
473
474 // Save and restore a native result
475 static void save_native_result(MacroAssembler *_masm, BasicType ret_type, int frame_slots);
476 static void restore_native_result(MacroAssembler *_masm, BasicType ret_type, int frame_slots);
477
478 // Generate a native wrapper for a given method. The method takes arguments
479 // in the Java compiled code convention, marshals them to the native
480 // convention (handlizes oops, etc), transitions to native, makes the call,
481 // returns to java state (possibly blocking), unhandlizes any result and
482 // returns.
483 //
484 // The wrapper may contain special-case code if the given method
485 // is a compiled method handle adapter, such as _invokeBasic, _linkToVirtual, etc.
486 static nmethod* generate_native_wrapper(MacroAssembler* masm,
487 const methodHandle& method,
488 int compile_id,
489 BasicType* sig_bt,
490 VMRegPair* regs,
491 BasicType ret_type);
492
493 // A compiled caller has just called the interpreter, but compiled code
494 // exists. Patch the caller so it no longer calls into the interpreter.
495 static void fixup_callers_callsite(Method* moop, address ret_pc);
496 static bool should_fixup_call_destination(address destination, address entry_point, address caller_pc, Method* moop, CodeBlob* cb);
497
498 // Slow-path Locking and Unlocking
499 static void complete_monitor_locking_C(oopDesc* obj, BasicLock* lock, JavaThread* current);
500 static void complete_monitor_unlocking_C(oopDesc* obj, BasicLock* lock, JavaThread* current);
501 static void redo_monitorenter(JavaThread* current, ObjectMonitor* monitor);
502
503 // Resolving of calls
504 static address resolve_static_call_C (JavaThread* current);
505 static address resolve_virtual_call_C (JavaThread* current);
506 static address resolve_opt_virtual_call_C(JavaThread* current);
507
508 // arraycopy, the non-leaf version. (See StubRoutines for all the leaf calls.)
509 static void slow_arraycopy_C(oopDesc* src, jint src_pos,
510 oopDesc* dest, jint dest_pos,
511 jint length, JavaThread* thread);
512
513 // handle ic miss with caller being compiled code
514 // wrong method handling (inline cache misses)
515 static address handle_wrong_method(JavaThread* current);
516 static address handle_wrong_method_abstract(JavaThread* current);
517 static address handle_wrong_method_ic_miss(JavaThread* current);
518
519 static address handle_unsafe_access(JavaThread* thread, address next_pc);
520
521 #ifndef PRODUCT
|