602 // frame. Since we need to preserve callee-saved values (currently
603 // only for C2, but done for C1 as well) we need a callee-saved oop
604 // map and therefore have to make these stubs into RuntimeStubs
605 // rather than BufferBlobs. If the compiler needs all registers to
606 // be preserved between the fault point and the exception handler
607 // then it must assume responsibility for that in
608 // AbstractCompiler::continuation_for_implicit_null_exception or
609 // continuation_for_implicit_division_by_zero_exception. All other
610 // implicit exceptions (e.g., NullPointerException or
611 // AbstractMethodError on entry) are either at call sites or
612 // otherwise assume that stack unwinding will be initiated, so
613 // caller saved registers were assumed volatile in the compiler.
614 address generate_throw_exception(const char* name,
615 address runtime_entry,
616 Register arg1 = noreg,  // optional; noreg means "not supplied" (presumably forwarded to runtime_entry — confirm in .cpp)
617 Register arg2 = noreg); // optional second argument; same convention as arg1
618
619 // Shared exception handler for FFM (Foreign Function & Memory) upcall stubs.
620 address generate_upcall_stub_exception_handler();
621
622 // Specialized stub implementations for UseSecondarySupersTable.
623 address generate_lookup_secondary_supers_table_stub(u1 super_klass_index);
624
625 // Slow path implementation for UseSecondarySupersTable.
626 address generate_lookup_secondary_supers_table_slow_path_stub();
627
628 void create_control_words(); // NOTE(review): presumably initializes FP control-word constants (x87/MXCSR) — confirm against the definition
629
630 // Initialization
631 void generate_initial_stubs();      // NOTE(review): naming suggests staged generation during VM startup — confirm invocation order
632 void generate_continuation_stubs();
633 void generate_compiler_stubs();
634 void generate_final_stubs();
635
636 public:
637 StubGenerator(CodeBuffer* code, StubsKind kind); // generates the stub group selected by 'kind' into 'code' (presumably — verify in .cpp)
638 };
639
640 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|
602 // frame. Since we need to preserve callee-saved values (currently
603 // only for C2, but done for C1 as well) we need a callee-saved oop
604 // map and therefore have to make these stubs into RuntimeStubs
605 // rather than BufferBlobs. If the compiler needs all registers to
606 // be preserved between the fault point and the exception handler
607 // then it must assume responsibility for that in
608 // AbstractCompiler::continuation_for_implicit_null_exception or
609 // continuation_for_implicit_division_by_zero_exception. All other
610 // implicit exceptions (e.g., NullPointerException or
611 // AbstractMethodError on entry) are either at call sites or
612 // otherwise assume that stack unwinding will be initiated, so
613 // caller saved registers were assumed volatile in the compiler.
614 address generate_throw_exception(const char* name,
615 address runtime_entry,
616 Register arg1 = noreg,  // optional; noreg means "not supplied" (presumably forwarded to runtime_entry — confirm in .cpp)
617 Register arg2 = noreg); // optional second argument; same convention as arg1
618
619 // Shared exception handler for FFM (Foreign Function & Memory) upcall stubs.
620 address generate_upcall_stub_exception_handler();
621
622 // Stub for the interpreter or compiled code, marshalling registers to/from an inline type instance.
623 address generate_return_value_stub(address destination, const char* name, bool has_res); // has_res: presumably whether a result value is produced — confirm in .cpp
624
625 // Specialized stub implementations for UseSecondarySupersTable.
626 address generate_lookup_secondary_supers_table_stub(u1 super_klass_index);
627
628 // Slow path implementation for UseSecondarySupersTable.
629 address generate_lookup_secondary_supers_table_slow_path_stub();
630
631 void create_control_words(); // NOTE(review): presumably initializes FP control-word constants (x87/MXCSR) — confirm against the definition
632
633 // Initialization
634 void generate_initial_stubs();      // NOTE(review): naming suggests staged generation during VM startup — confirm invocation order
635 void generate_continuation_stubs();
636 void generate_compiler_stubs();
637 void generate_final_stubs();
638
639 public:
640 StubGenerator(CodeBuffer* code, StubsKind kind); // generates the stub group selected by 'kind' into 'code' (presumably — verify in .cpp)
641 };
642
643 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|