609 // only for C2, but done for C1 as well) we need a callee-saved oop
610 // map and therefore have to make these stubs into RuntimeStubs
611 // rather than BufferBlobs. If the compiler needs all registers to
612 // be preserved between the fault point and the exception handler
613 // then it must assume responsibility for that in
614 // AbstractCompiler::continuation_for_implicit_null_exception or
615 // continuation_for_implicit_division_by_zero_exception. All other
616 // implicit exceptions (e.g., NullPointerException or
617 // AbstractMethodError on entry) are either at call sites or
618 // otherwise assume that stack unwinding will be initiated, so
619 // caller saved registers were assumed volatile in the compiler.
620 address generate_throw_exception(const char* name,
621 address runtime_entry,
622 Register arg1 = noreg,
623 Register arg2 = noreg);
624
625   // Shared exception handler for FFM (Foreign Function & Memory API) upcall stubs.
626 address generate_upcall_stub_exception_handler();
627 address generate_upcall_stub_load_target();
628
629 // Specialized stub implementations for UseSecondarySupersTable.
630 address generate_lookup_secondary_supers_table_stub(u1 super_klass_index);
631
632 // Slow path implementation for UseSecondarySupersTable.
633 address generate_lookup_secondary_supers_table_slow_path_stub();
634
635 void create_control_words();
636
637 // Initialization
638 void generate_initial_stubs();
639 void generate_continuation_stubs();
640 void generate_compiler_stubs();
641 void generate_final_stubs();
642
643 public:
644 StubGenerator(CodeBuffer* code, StubsKind kind);
645 };
646
647 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|
609 // only for C2, but done for C1 as well) we need a callee-saved oop
610 // map and therefore have to make these stubs into RuntimeStubs
611 // rather than BufferBlobs. If the compiler needs all registers to
612 // be preserved between the fault point and the exception handler
613 // then it must assume responsibility for that in
614 // AbstractCompiler::continuation_for_implicit_null_exception or
615 // continuation_for_implicit_division_by_zero_exception. All other
616 // implicit exceptions (e.g., NullPointerException or
617 // AbstractMethodError on entry) are either at call sites or
618 // otherwise assume that stack unwinding will be initiated, so
619 // caller saved registers were assumed volatile in the compiler.
620 address generate_throw_exception(const char* name,
621 address runtime_entry,
622 Register arg1 = noreg,
623 Register arg2 = noreg);
624
625   // Shared exception handler for FFM (Foreign Function & Memory API) upcall stubs.
626 address generate_upcall_stub_exception_handler();
627 address generate_upcall_stub_load_target();
628
629   // Stub that marshals registers to/from an inline-type instance when passing
629   // return values between interpreted and compiled code.
630 address generate_return_value_stub(address destination, const char* name, bool has_res);
631
632 // Specialized stub implementations for UseSecondarySupersTable.
633 address generate_lookup_secondary_supers_table_stub(u1 super_klass_index);
634
635 // Slow path implementation for UseSecondarySupersTable.
636 address generate_lookup_secondary_supers_table_slow_path_stub();
637
638 void create_control_words();
639
640 // Initialization
641 void generate_initial_stubs();
642 void generate_continuation_stubs();
643 void generate_compiler_stubs();
644 void generate_final_stubs();
645
646 public:
647 StubGenerator(CodeBuffer* code, StubsKind kind);
648 };
649
650 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|