  // only for C2, but done for C1 as well) we need a callee-saved oop
  // map and therefore have to make these stubs into RuntimeStubs
  // rather than BufferBlobs. If the compiler needs all registers to
  // be preserved between the fault point and the exception handler
  // then it must assume responsibility for that in
  // AbstractCompiler::continuation_for_implicit_null_exception or
  // continuation_for_implicit_division_by_zero_exception. All other
  // implicit exceptions (e.g., NullPointerException or
  // AbstractMethodError on entry) are either at call sites or
  // otherwise assume that stack unwinding will be initiated, so
  // caller-saved registers were assumed volatile in the compiler.
  address generate_throw_exception(const char* name,
                                   address runtime_entry,
                                   Register arg1 = noreg,
                                   Register arg2 = noreg);
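  //
  // An illustrative call site (a sketch, not an exhaustive list; the
  // actual registrations live in stubGenerator_x86_64.cpp and may vary
  // across JDK versions):
  //
  //   StubRoutines::_throw_AbstractMethodError_entry =
  //     generate_throw_exception("AbstractMethodError throw_exception",
  //                              CAST_FROM_FN_PTR(address,
  //                                               SharedRuntime::throw_AbstractMethodError));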

  // Shared exception handler for FFM (Foreign Function & Memory) upcall stubs.
  address generate_upcall_stub_exception_handler();
  // Loads the target Method* that an FFM upcall stub should invoke.
  address generate_upcall_stub_load_target();

  // Marshals registers to/from an inline-type instance when returning
  // between interpreted and compiled code.
  address generate_return_value_stub(address destination, const char* name, bool has_res);

  // Specialized stub implementations for UseSecondarySupersTable.
  address generate_lookup_secondary_supers_table_stub(u1 super_klass_index);

  // Slow-path implementation for UseSecondarySupersTable.
  address generate_lookup_secondary_supers_table_slow_path_stub();
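  //
  // Registration sketch (assumes the pattern used by
  // generate_compiler_stubs(); details may differ by JDK version):
  //
  //   if (UseSecondarySupersTable) {
  //     StubRoutines::_lookup_secondary_supers_table_slow_path_stub =
  //       generate_lookup_secondary_supers_table_slow_path_stub();
  //     for (int slot = 0; slot < Klass::SECONDARY_SUPERS_TABLE_SIZE; slot++) {
  //       StubRoutines::_lookup_secondary_supers_table_stubs[slot] =
  //         generate_lookup_secondary_supers_table_stub(slot);
  //     }
  //   }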

  // Initializes the control-word constants (e.g., MXCSR) used by generated stubs.
  void create_control_words();

  // Initialization
  void generate_initial_stubs();
  void generate_continuation_stubs();
  void generate_compiler_stubs();
  void generate_final_stubs();

 public:
  StubGenerator(CodeBuffer* code, StubsKind kind);
};

#endif // CPU_X86_STUBGENERATOR_X86_64_HPP