src/hotspot/cpu/x86/x86_64.ad

2099   %}
2100 
2101   enc_class clear_avx %{
2102     debug_only(int off0 = cbuf.insts_size());
2103     if (generate_vzeroupper(Compile::current())) {
2104       // Clear upper bits of YMM registers to avoid AVX <-> SSE transition penalty
2105       // Clear upper bits of YMM registers when current compiled code uses
2106       // wide vectors to avoid AVX <-> SSE transition penalty during call.
2107       MacroAssembler _masm(&cbuf);
2108       __ vzeroupper();
2109     }
2110     debug_only(int off1 = cbuf.insts_size());
2111     assert(off1 - off0 == clear_avx_size(), "correct size prediction");
2112   %}
2113 
2114   enc_class Java_To_Runtime(method meth) %{
2115     // No relocation needed
2116     MacroAssembler _masm(&cbuf);
2117     __ mov64(r10, (int64_t) $meth$$method);
2118     __ call(r10);

2119   %}
2120 
2121   enc_class Java_To_Interpreter(method meth)
2122   %{
2123     // CALL Java_To_Interpreter
2124     // This is the instruction starting address for relocation info.
2125     cbuf.set_insts_mark();
2126     $$$emit8$primary;
2127     // CALL directly to the runtime
2128     emit_d32_reloc(cbuf,
2129                    (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2130                    runtime_call_Relocation::spec(),
2131                    RELOC_DISP32);

2132   %}
2133 
2134   enc_class Java_Static_Call(method meth)
2135   %{
2136     // JAVA STATIC CALL
2137     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to
2138     // determine who we intended to call.

2139     cbuf.set_insts_mark();
2140     $$$emit8$primary;
2141 
2142     if (!_method) {
2143       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2144                      runtime_call_Relocation::spec(),
2145                      RELOC_DISP32);
2146     } else {
2147       int method_index = resolved_method_index(cbuf);
2148       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
2149                                                   : static_call_Relocation::spec(method_index);
2150       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2151                      rspec, RELOC_DISP32);
2152       // Emit stubs for static call.
2153       address mark = cbuf.insts_mark();
2154       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
2155       if (stub == NULL) {
2156         ciEnv::current()->record_failure("CodeCache is full");
2157         return;
2158       }
2159 #if INCLUDE_AOT
2160       CompiledStaticCall::emit_to_aot_stub(cbuf, mark);
2161 #endif
2162     }

2163   %}
2164 
2165   enc_class Java_Dynamic_Call(method meth) %{
2166     MacroAssembler _masm(&cbuf);
2167     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));

2168   %}
2169 
2170   enc_class Java_Compiled_Call(method meth)
2171   %{
2172     // JAVA COMPILED CALL
2173     int disp = in_bytes(Method::from_compiled_offset());
2174 
2175     // XXX XXX offset is 128 is 1.5 NON-PRODUCT !!!
2176     // assert(-0x80 <= disp && disp < 0x80, "compiled_code_offset isn't small");
2177 
2178     // callq *disp(%rax)

2179     cbuf.set_insts_mark();
2180     $$$emit8$primary;
2181     if (disp < 0x80) {
2182       emit_rm(cbuf, 0x01, $secondary, RAX_enc); // R/M byte
2183       emit_d8(cbuf, disp); // Displacement
2184     } else {
2185       emit_rm(cbuf, 0x02, $secondary, RAX_enc); // R/M byte
2186       emit_d32(cbuf, disp); // Displacement
2187     }

2188   %}
2189 
2190   enc_class reg_opc_imm(rRegI dst, immI8 shift)
2191   %{
2192     // SAL, SAR, SHR
2193     int dstenc = $dst$$reg;
2194     if (dstenc >= 8) {
2195       emit_opcode(cbuf, Assembler::REX_B);
2196       dstenc -= 8;
2197     }
2198     $$$emit8$primary;
2199     emit_rm(cbuf, 0x3, $secondary, dstenc);
2200     $$$emit8$shift$$constant;
2201   %}
2202 
2203   enc_class reg_opc_imm_wide(rRegL dst, immI8 shift)
2204   %{
2205     // SAL, SAR, SHR
2206     int dstenc = $dst$$reg;
2207     if (dstenc < 8) {


11601     __ byte_array_inflate($src$$Register, $dst$$Register, $len$$Register,
11602                           $tmp1$$XMMRegister, $tmp2$$Register);
11603   %}
11604   ins_pipe( pipe_slow );
11605 %}
11606 
11607 // encode char[] to byte[] in ISO_8859_1
11608 instruct encode_iso_array(rsi_RegP src, rdi_RegP dst, rdx_RegI len,
11609                           legVecS tmp1, legVecS tmp2, legVecS tmp3, legVecS tmp4,
11610                           rcx_RegI tmp5, rax_RegI result, rFlagsReg cr) %{
11611   match(Set result (EncodeISOArray src (Binary dst len)));
11612   effect(TEMP tmp1, TEMP tmp2, TEMP tmp3, TEMP tmp4, USE_KILL src, USE_KILL dst, USE_KILL len, KILL tmp5, KILL cr);
11613 
11614   format %{ "Encode array $src,$dst,$len -> $result    // KILL RCX, RDX, $tmp1, $tmp2, $tmp3, $tmp4, RSI, RDI " %}
11615   ins_encode %{
11616     __ encode_iso_array($src$$Register, $dst$$Register, $len$$Register,
11617                         $tmp1$$XMMRegister, $tmp2$$XMMRegister, $tmp3$$XMMRegister,
11618                         $tmp4$$XMMRegister, $tmp5$$Register, $result$$Register);
11619   %}
11620   ins_pipe( pipe_slow );














11621 %}
11622 
11623 //----------Overflow Math Instructions-----------------------------------------
11624 
11625 instruct overflowAddI_rReg(rFlagsReg cr, rax_RegI op1, rRegI op2)
11626 %{
11627   match(Set cr (OverflowAddI op1 op2));
11628   effect(DEF cr, USE_KILL op1, USE op2);
11629 
11630   format %{ "addl    $op1, $op2\t# overflow check int" %}
11631 
11632   ins_encode %{
11633     __ addl($op1$$Register, $op2$$Register);
11634   %}
11635   ins_pipe(ialu_reg_reg);
11636 %}
11637 
11638 instruct overflowAddI_rReg_imm(rFlagsReg cr, rax_RegI op1, immI op2)
11639 %{
11640   match(Set cr (OverflowAddI op1 op2));




2099   %}
2100 
2101   enc_class clear_avx %{
2102     debug_only(int off0 = cbuf.insts_size());
2103     if (generate_vzeroupper(Compile::current())) {
2104       // Clear upper bits of YMM registers to avoid AVX <-> SSE transition penalty
2105       // Clear upper bits of YMM registers when current compiled code uses
2106       // wide vectors to avoid AVX <-> SSE transition penalty during call.
2107       MacroAssembler _masm(&cbuf);
2108       __ vzeroupper();
2109     }
2110     debug_only(int off1 = cbuf.insts_size());
2111     assert(off1 - off0 == clear_avx_size(), "correct size prediction");
2112   %}
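
A minimal standalone sketch, not HotSpot code, of the size relation the assert above checks, assuming vzeroupper's usual three-byte encoding (c5 f8 77) and a clear_avx_size() that predicts that same length; the helper name is hypothetical:

    // Hypothetical helper mirroring the prediction the assert compares against.
    static inline int predicted_clear_avx_size(bool emit_vzeroupper) {
      // vzeroupper = 2-byte VEX prefix (c5 f8) + opcode (77) => 3 bytes,
      // or nothing at all when the instruction is skipped.
      return emit_vzeroupper ? 3 : 0;
    }
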
2113 
2114   enc_class Java_To_Runtime(method meth) %{
2115     // No relocation needed
2116     MacroAssembler _masm(&cbuf);
2117     __ mov64(r10, (int64_t) $meth$$method);
2118     __ call(r10);
2119     __ post_call_nop();
2120   %}
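
The "No relocation needed" comment holds because the target is baked into the code stream as a 64-bit absolute immediate and the call is register-indirect, so there is no pc-relative field for the relocator to patch. A standalone sketch, not HotSpot code, of the byte shape, assuming the standard x86-64 encodings (REX.W+B B8+r for mov r10, imm64 and FF /2 for an indirect call); the helper name is hypothetical:

    #include <cstdint>
    #include <vector>

    // Hypothetical illustration of the emitted pair "mov r10, imm64; call *r10".
    static std::vector<uint8_t> call_via_r10(uint64_t target) {
      std::vector<uint8_t> out = {0x49, 0xBA};                    // movabs $target, %r10
      for (int i = 0; i < 8; i++) {
        out.push_back(static_cast<uint8_t>(target >> (8 * i)));   // imm64, little-endian
      }
      out.insert(out.end(), {0x41, 0xFF, 0xD2});                  // call *%r10
      return out;                                                 // nothing pc-relative to relocate
    }
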
2121 
2122   enc_class Java_To_Interpreter(method meth)
2123   %{
2124     // CALL Java_To_Interpreter
2125     // This is the instruction starting address for relocation info.
2126     cbuf.set_insts_mark();
2127     $$$emit8$primary;
2128     // CALL directly to the runtime
2129     emit_d32_reloc(cbuf,
2130                    (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2131                    runtime_call_Relocation::spec(),
2132                    RELOC_DISP32);
2133     __ post_call_nop();
2134   %}
2135 
2136   enc_class Java_Static_Call(method meth)
2137   %{
2138     // JAVA STATIC CALL
2139     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to
2140     // determine who we intended to call.
2141     MacroAssembler _masm(&cbuf);
2142     cbuf.set_insts_mark();
2143     $$$emit8$primary;
2144 
2145     if (!_method) {
2146       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2147                      runtime_call_Relocation::spec(),
2148                      RELOC_DISP32);
2149     } else {
2150       int method_index = resolved_method_index(cbuf);
2151       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
2152                                                   : static_call_Relocation::spec(method_index);
2153       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
2154                      rspec, RELOC_DISP32);
2155       // Emit stubs for static call.
2156       address mark = cbuf.insts_mark();
2157       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
2158       if (stub == NULL) {
2159         ciEnv::current()->record_failure("CodeCache is full");
2160         return;
2161       }
2162 #if INCLUDE_AOT
2163       CompiledStaticCall::emit_to_aot_stub(cbuf, mark);
2164 #endif
2165     }
2166     __ post_call_nop();
2167   %}
2168 
2169   enc_class Java_Dynamic_Call(method meth) %{
2170     MacroAssembler _masm(&cbuf);
2171     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));
2172     __ post_call_nop();
2173   %}
2174 
2175   enc_class Java_Compiled_Call(method meth)
2176   %{
2177     // JAVA COMPILED CALL
2178     int disp = in_bytes(Method::from_compiled_offset());
2179 
2180     // XXX XXX offset is 128 is 1.5 NON-PRODUCT !!!
2181     // assert(-0x80 <= disp && disp < 0x80, "compiled_code_offset isn't small");
2182 
2183     // callq *disp(%rax)
2184     MacroAssembler _masm(&cbuf);
2185     cbuf.set_insts_mark();
2186     $$$emit8$primary;
2187     if (disp < 0x80) {
2188       emit_rm(cbuf, 0x01, $secondary, RAX_enc); // R/M byte
2189       emit_d8(cbuf, disp); // Displacement
2190     } else {
2191       emit_rm(cbuf, 0x02, $secondary, RAX_enc); // R/M byte
2192       emit_d32(cbuf, disp); // Displacement
2193     }
2194     __ post_call_nop();
2195   %}
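
The disp < 0x80 test above is the usual ModRM displacement-size choice. A standalone sketch, not HotSpot code, of the resulting bytes, assuming the primary opcode is 0xFF and the secondary field is /2 (CALL r/m64); the helper name is hypothetical:

    #include <cstdint>
    #include <vector>

    // Hypothetical encoder for "call *disp(%rax)" with an 8- or 32-bit displacement.
    // (The .ad code above can drop the disp >= 0 guard because from_compiled_offset()
    // is known to be a small non-negative offset.)
    static std::vector<uint8_t> call_rax_disp(int32_t disp) {
      std::vector<uint8_t> out;
      out.push_back(0xFF);                                        // CALL r/m64
      if (disp >= 0 && disp < 0x80) {
        out.push_back(0x50);                                      // ModRM: mod=01, reg=/2, rm=rax
        out.push_back(static_cast<uint8_t>(disp));                // disp8
      } else {
        out.push_back(0x90);                                      // ModRM: mod=10, reg=/2, rm=rax
        for (int i = 0; i < 4; i++) {
          out.push_back(static_cast<uint8_t>(disp >> (8 * i)));   // disp32, little-endian
        }
      }
      return out;
    }
    // call_rax_disp(0x18)  => ff 50 18
    // call_rax_disp(0x100) => ff 90 00 01 00 00
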
2196 
2197   enc_class reg_opc_imm(rRegI dst, immI8 shift)
2198   %{
2199     // SAL, SAR, SHR
2200     int dstenc = $dst$$reg;
2201     if (dstenc >= 8) {
2202       emit_opcode(cbuf, Assembler::REX_B);
2203       dstenc -= 8;
2204     }
2205     $$$emit8$primary;
2206     emit_rm(cbuf, 0x3, $secondary, dstenc);
2207     $$$emit8$shift$$constant;
2208   %}
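
The REX.B prefix emitted above supplies the fourth register-number bit that ModRM cannot hold, which is why encodings 8..15 (r8d..r15d) subtract 8 before landing in the rm field. A standalone sketch, not HotSpot code, for one member of the shift group (sar, i.e. opcode C1 /7); the helper name is hypothetical:

    #include <cstdint>
    #include <vector>

    // Hypothetical encoder for "sar reg32, imm8" over all 16 GPR encodings.
    static std::vector<uint8_t> sar_imm8(int reg_enc, uint8_t shift) {
      std::vector<uint8_t> out;
      if (reg_enc >= 8) {
        out.push_back(0x41);          // REX.B carries the high register bit
        reg_enc -= 8;
      }
      out.push_back(0xC1);            // group-2 shift with imm8
      out.push_back(static_cast<uint8_t>(0xC0 | (7 << 3) | reg_enc));  // mod=11, /7, rm
      out.push_back(shift);
      return out;
    }
    // sar_imm8(1, 3) => c1 f9 03     (sarl $3, %ecx)
    // sar_imm8(9, 3) => 41 c1 f9 03  (sarl $3, %r9d)
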
2209 
2210   enc_class reg_opc_imm_wide(rRegL dst, immI8 shift)
2211   %{
2212     // SAL, SAR, SHR
2213     int dstenc = $dst$$reg;
2214     if (dstenc < 8) {


11608     __ byte_array_inflate($src$$Register, $dst$$Register, $len$$Register,
11609                           $tmp1$$XMMRegister, $tmp2$$Register);
11610   %}
11611   ins_pipe( pipe_slow );
11612 %}
11613 
11614 // encode char[] to byte[] in ISO_8859_1
11615 instruct encode_iso_array(rsi_RegP src, rdi_RegP dst, rdx_RegI len,
11616                           legVecS tmp1, legVecS tmp2, legVecS tmp3, legVecS tmp4,
11617                           rcx_RegI tmp5, rax_RegI result, rFlagsReg cr) %{
11618   match(Set result (EncodeISOArray src (Binary dst len)));
11619   effect(TEMP tmp1, TEMP tmp2, TEMP tmp3, TEMP tmp4, USE_KILL src, USE_KILL dst, USE_KILL len, KILL tmp5, KILL cr);
11620 
11621   format %{ "Encode array $src,$dst,$len -> $result    // KILL RCX, RDX, $tmp1, $tmp2, $tmp3, $tmp4, RSI, RDI " %}
11622   ins_encode %{
11623     __ encode_iso_array($src$$Register, $dst$$Register, $len$$Register,
11624                         $tmp1$$XMMRegister, $tmp2$$XMMRegister, $tmp3$$XMMRegister,
11625                         $tmp4$$XMMRegister, $tmp5$$Register, $result$$Register);
11626   %}
11627   ins_pipe( pipe_slow );
11628 %}
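
A standalone scalar sketch, not HotSpot code, of what the vectorized stub computes, assuming the intrinsic's usual contract of returning how many chars were encoded before the first one that does not fit in ISO-8859-1; the helper name is hypothetical:

    #include <cstdint>

    // Hypothetical scalar equivalent of the SSE/AVX loop in encode_iso_array.
    static int encode_iso_scalar(const uint16_t* src, uint8_t* dst, int len) {
      int i = 0;
      for (; i < len; i++) {
        if (src[i] > 0xFF) break;                // char not representable, stop here
        dst[i] = static_cast<uint8_t>(src[i]);   // truncate to a single byte
      }
      return i;                                  // count of chars actually encoded
    }
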
11629 
11630 instruct getFP(rRegL dst) %{
11631   match(Set dst (GetFP));
11632   effect(DEF dst);
11633   ins_cost(1);
11634  
11635   ins_encode %{
11636     // Remove wordSize for return addr which is already pushed.
11637     int framesize = Compile::current()->frame_size_in_bytes() - wordSize;
11638     Address base(rsp, framesize);
11639     __ lea($dst$$Register, base);
11640   %}
11641   ins_pipe(ialu_reg_reg_long);
11642 %}
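
The new getFP rule derives its result purely from the current rsp and the compiled frame size. A standalone sketch, not HotSpot code, of the arithmetic, using a hypothetical frame size of 0x60 bytes and wordSize == 8; the helper name is hypothetical:

    #include <cstdint>

    // Hypothetical restatement of the lea above: drop the return-address word
    // the caller already pushed, then offset from the current stack pointer.
    static inline uintptr_t get_fp(uintptr_t rsp, int frame_size_in_bytes) {
      return rsp + static_cast<uintptr_t>(frame_size_in_bytes - 8);
    }
    // get_fp(rsp, 0x60) == rsp + 0x58
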
11643 
11644 //----------Overflow Math Instructions-----------------------------------------
11645 
11646 instruct overflowAddI_rReg(rFlagsReg cr, rax_RegI op1, rRegI op2)
11647 %{
11648   match(Set cr (OverflowAddI op1 op2));
11649   effect(DEF cr, USE_KILL op1, USE op2);
11650 
11651   format %{ "addl    $op1, $op2\t# overflow check int" %}
11652 
11653   ins_encode %{
11654     __ addl($op1$$Register, $op2$$Register);
11655   %}
11656   ins_pipe(ialu_reg_reg);
11657 %}
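
The pattern above relies on addl setting the overflow flag, which a following branch then tests (jo/jno). A standalone sketch, not HotSpot code, of the equivalent flag-free condition for signed 32-bit operands; the helper name is hypothetical:

    #include <climits>

    // Hypothetical restatement of what OF encodes for the signed sum "a + b".
    static inline bool add_overflows(int a, int b) {
      return (b > 0 && a > INT_MAX - b) || (b < 0 && a < INT_MIN - b);
    }
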
11658 
11659 instruct overflowAddI_rReg_imm(rFlagsReg cr, rax_RegI op1, immI op2)
11660 %{
11661   match(Set cr (OverflowAddI op1 op2));

