< prev index next >

src/hotspot/cpu/x86/stubGenerator_x86_32.cpp

Print this page




  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "gc/shared/barrierSet.hpp"
  29 #include "gc/shared/barrierSetAssembler.hpp"

  30 #include "interpreter/interpreter.hpp"
  31 #include "memory/universe.hpp"
  32 #include "nativeInst_x86.hpp"
  33 #include "oops/instanceOop.hpp"
  34 #include "oops/method.hpp"
  35 #include "oops/objArrayKlass.hpp"
  36 #include "oops/oop.inline.hpp"
  37 #include "prims/methodHandles.hpp"
  38 #include "runtime/frame.inline.hpp"
  39 #include "runtime/handles.inline.hpp"
  40 #include "runtime/sharedRuntime.hpp"
  41 #include "runtime/stubCodeGenerator.hpp"
  42 #include "runtime/stubRoutines.hpp"
  43 #include "runtime/thread.inline.hpp"
  44 #ifdef COMPILER2
  45 #include "opto/runtime.hpp"
  46 #endif
  47 
  48 // Declaration and definition of StubGenerator (no .hpp file).
  49 // For a more detailed description of the stub routine structure


3645     // Load *adr into eax, may fault.
3646     *fault_pc = __ pc();
3647     switch (size) {
3648       case 4:
3649         // int32_t
3650         __ movl(rax, Address(rcx, 0));
3651         break;
3652       case 8:
3653         // int64_t
3654         Unimplemented();
3655         break;
3656       default:
3657         ShouldNotReachHere();
3658     }
3659 
3660     // Return errValue or *adr.
3661     *continuation_pc = __ pc();
3662     __ ret(0);
3663   }
3664 






























































3665  public:
3666   // Information about frame layout at time of blocking runtime call.
3667   // Note that we only have to preserve callee-saved registers since
3668   // the compilers are responsible for supplying a continuation point
3669   // if they expect all registers to be preserved.
  // Stack-slot indices (in words, growing toward higher addresses) for the
  // frame built around a blocking runtime call; 'framesize' is the slot
  // count, not a slot itself.
  enum layout {
    thread_off,    // last_java_sp
    arg1_off,      // first outgoing argument slot — presumably; confirm against users of this layout
    arg2_off,      // second outgoing argument slot
    rbp_off,       // callee saved register
    ret_pc,        // return address slot
    framesize      // total frame size in words
  };
3678 
3679  private:
3680 
3681 #undef  __
3682 #define __ masm->
3683 
3684   //------------------------------------------------------------------------------------------------------------------------


3941       StubRoutines::x86::_k256_adr = (address)StubRoutines::x86::_k256;
3942       StubRoutines::x86::_pshuffle_byte_flip_mask_addr = generate_pshuffle_byte_flip_mask();
3943       StubRoutines::_sha256_implCompress = generate_sha256_implCompress(false, "sha256_implCompress");
3944       StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(true, "sha256_implCompressMB");
3945     }
3946 
3947     // Generate GHASH intrinsics code
3948     if (UseGHASHIntrinsics) {
3949       StubRoutines::x86::_ghash_long_swap_mask_addr = generate_ghash_long_swap_mask();
3950       StubRoutines::x86::_ghash_byte_swap_mask_addr = generate_ghash_byte_swap_mask();
3951       StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
3952     }
3953 
3954     // Safefetch stubs.
3955     generate_safefetch("SafeFetch32", sizeof(int), &StubRoutines::_safefetch32_entry,
3956                                                    &StubRoutines::_safefetch32_fault_pc,
3957                                                    &StubRoutines::_safefetch32_continuation_pc);
3958     StubRoutines::_safefetchN_entry           = StubRoutines::_safefetch32_entry;
3959     StubRoutines::_safefetchN_fault_pc        = StubRoutines::_safefetch32_fault_pc;
3960     StubRoutines::_safefetchN_continuation_pc = StubRoutines::_safefetch32_continuation_pc;
3961   }
3962 





3963 
3964  public:
3965   StubGenerator(CodeBuffer* code, bool all) : StubCodeGenerator(code) {
3966     if (all) {
3967       generate_all();
3968     } else {
3969       generate_initial();
3970     }
3971   }
3972 }; // end class declaration
3973 
3974 #define UCM_TABLE_MAX_ENTRIES 8
3975 void StubGenerator_generate(CodeBuffer* code, bool all) {
3976   if (UnsafeCopyMemory::_table == NULL) {
3977     UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
3978   }
3979   StubGenerator g(code, all);
3980 }


  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "asm/macroAssembler.hpp"
  27 #include "asm/macroAssembler.inline.hpp"
  28 #include "gc/shared/barrierSet.hpp"
  29 #include "gc/shared/barrierSetAssembler.hpp"
  30 #include "gc/shared/barrierSetNMethod.hpp"
  31 #include "interpreter/interpreter.hpp"
  32 #include "memory/universe.hpp"
  33 #include "nativeInst_x86.hpp"
  34 #include "oops/instanceOop.hpp"
  35 #include "oops/method.hpp"
  36 #include "oops/objArrayKlass.hpp"
  37 #include "oops/oop.inline.hpp"
  38 #include "prims/methodHandles.hpp"
  39 #include "runtime/frame.inline.hpp"
  40 #include "runtime/handles.inline.hpp"
  41 #include "runtime/sharedRuntime.hpp"
  42 #include "runtime/stubCodeGenerator.hpp"
  43 #include "runtime/stubRoutines.hpp"
  44 #include "runtime/thread.inline.hpp"
  45 #ifdef COMPILER2
  46 #include "opto/runtime.hpp"
  47 #endif
  48 
  49 // Declaration and definition of StubGenerator (no .hpp file).
  50 // For a more detailed description of the stub routine structure


3646     // Load *adr into eax, may fault.
3647     *fault_pc = __ pc();
3648     switch (size) {
3649       case 4:
3650         // int32_t
3651         __ movl(rax, Address(rcx, 0));
3652         break;
3653       case 8:
3654         // int64_t
3655         Unimplemented();
3656         break;
3657       default:
3658         ShouldNotReachHere();
3659     }
3660 
3661     // Return errValue or *adr.
3662     *continuation_pc = __ pc();
3663     __ ret(0);
3664   }
3665 
  // Generate the nmethod entry barrier stub (x86_32).
  // Calls into BarrierSetNMethod::nmethod_stub_entry_barrier with the address
  // of the caller's return-address slot; a return value of 1 means the caller
  // nmethod must be deoptimized, anything else resumes it normally.  All
  // general-purpose registers (via pusha) and xmm0/xmm1 (which may carry
  // float/double arguments) are preserved around the runtime call.
  address generate_method_entry_barrier() {
    __ align(CodeEntryAlignment);
    StubCodeMark mark(this, "StubRoutines", "nmethod_entry_barrier");

    Label deoptimize_label;

    address start = __ pc();

    __ push(-1); // cookie, this is used for writing the new rsp when deoptimizing

    BLOCK_COMMENT("Entry:");
    __ enter(); // save rbp

    // save rbx, because we want to use that value.
    // We could do without it but then we depend on the number of slots used by pusha
    __ push(rbx);

    __ lea(rbx, Address(rsp, wordSize * 3)); // 1 for cookie, 1 for rbp, 1 for rbx - this should be the return address

    __ pusha();

    // xmm0 and xmm1 may be used for passing float/double arguments
    const int xmm_size = wordSize * 2;
    const int xmm_spill_size = xmm_size * 2;
    __ subptr(rsp, xmm_spill_size);
    __ movdqu(Address(rsp, xmm_size * 1), xmm1);
    __ movdqu(Address(rsp, xmm_size * 0), xmm0);

    // Pass the address of the return-address slot; the barrier may rewrite it
    // (and the saved rsp cookie) when it decides to deoptimize.
    __ call_VM_leaf(CAST_FROM_FN_PTR(address, static_cast<int (*)(address*)>(BarrierSetNMethod::nmethod_stub_entry_barrier)), rbx);

    // Restore the spilled xmm argument registers.
    __ movdqu(xmm0, Address(rsp, xmm_size * 0));
    __ movdqu(xmm1, Address(rsp, xmm_size * 1));
    __ addptr(rsp, xmm_spill_size);

    __ cmpl(rax, 1); // 1 means deoptimize
    __ jcc(Assembler::equal, deoptimize_label);

    // Normal path: restore registers, pop the cookie and return to the caller.
    __ popa();
    __ pop(rbx);

    __ leave();

    __ addptr(rsp, 1 * wordSize); // cookie
    __ ret(0);

    __ BIND(deoptimize_label);

    // Deoptimize path: restore registers, then switch to the stack pointer
    // that the barrier wrote into the cookie slot.
    __ popa();
    __ pop(rbx);

    __ leave();

    // this can be taken out, but is good for verification purposes. getting a SIGSEGV
    // here while still having a correct stack is valuable
    __ testptr(rsp, Address(rsp, 0));

    __ movptr(rsp, Address(rsp, 0)); // new rsp was written in the barrier
    __ jmp(Address(rsp, -1 * wordSize)); // jmp target should be callers verified_entry_point

    return start;
  }
3727 
3728  public:
3729   // Information about frame layout at time of blocking runtime call.
3730   // Note that we only have to preserve callee-saved registers since
3731   // the compilers are responsible for supplying a continuation point
3732   // if they expect all registers to be preserved.
  // Stack-slot indices (in words, growing toward higher addresses) for the
  // frame built around a blocking runtime call; 'framesize' is the slot
  // count, not a slot itself.
  enum layout {
    thread_off,    // last_java_sp
    arg1_off,      // first outgoing argument slot — presumably; confirm against users of this layout
    arg2_off,      // second outgoing argument slot
    rbp_off,       // callee saved register
    ret_pc,        // return address slot
    framesize      // total frame size in words
  };
3741 
3742  private:
3743 
3744 #undef  __
3745 #define __ masm->
3746 
3747   //------------------------------------------------------------------------------------------------------------------------


4004       StubRoutines::x86::_k256_adr = (address)StubRoutines::x86::_k256;
4005       StubRoutines::x86::_pshuffle_byte_flip_mask_addr = generate_pshuffle_byte_flip_mask();
4006       StubRoutines::_sha256_implCompress = generate_sha256_implCompress(false, "sha256_implCompress");
4007       StubRoutines::_sha256_implCompressMB = generate_sha256_implCompress(true, "sha256_implCompressMB");
4008     }
4009 
4010     // Generate GHASH intrinsics code
4011     if (UseGHASHIntrinsics) {
4012       StubRoutines::x86::_ghash_long_swap_mask_addr = generate_ghash_long_swap_mask();
4013       StubRoutines::x86::_ghash_byte_swap_mask_addr = generate_ghash_byte_swap_mask();
4014       StubRoutines::_ghash_processBlocks = generate_ghash_processBlocks();
4015     }
4016 
4017     // Safefetch stubs.
4018     generate_safefetch("SafeFetch32", sizeof(int), &StubRoutines::_safefetch32_entry,
4019                                                    &StubRoutines::_safefetch32_fault_pc,
4020                                                    &StubRoutines::_safefetch32_continuation_pc);
4021     StubRoutines::_safefetchN_entry           = StubRoutines::_safefetch32_entry;
4022     StubRoutines::_safefetchN_fault_pc        = StubRoutines::_safefetch32_fault_pc;
4023     StubRoutines::_safefetchN_continuation_pc = StubRoutines::_safefetch32_continuation_pc;

4024 
4025     BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
4026     if (bs_nm != NULL) {
4027       StubRoutines::x86::_method_entry_barrier = generate_method_entry_barrier();
4028     }
4029   }
4030 
4031  public:
4032   StubGenerator(CodeBuffer* code, bool all) : StubCodeGenerator(code) {
4033     if (all) {
4034       generate_all();
4035     } else {
4036       generate_initial();
4037     }
4038   }
4039 }; // end class declaration
4040 
4041 #define UCM_TABLE_MAX_ENTRIES 8
4042 void StubGenerator_generate(CodeBuffer* code, bool all) {
4043   if (UnsafeCopyMemory::_table == NULL) {
4044     UnsafeCopyMemory::create_table(UCM_TABLE_MAX_ENTRIES);
4045   }
4046   StubGenerator g(code, all);
4047 }
< prev index next >