src/hotspot/cpu/x86/x86_64.ad

 2121   %}
 2122 
 2123   enc_class clear_avx %{
 2124     debug_only(int off0 = cbuf.insts_size());
 2125     if (generate_vzeroupper(Compile::current())) {
 2126       // Clear the upper bits of the YMM registers when the current compiled
 2127       // code uses wide vectors, to avoid the AVX <-> SSE transition penalty
 2128       // during the call.
 2129       MacroAssembler _masm(&cbuf);
 2130       __ vzeroupper();
 2131     }
 2132     debug_only(int off1 = cbuf.insts_size());
 2133     assert(off1 - off0 == clear_avx_size(), "correct size prediction");
 2134   %}
 2135 
 2136   enc_class Java_To_Runtime(method meth) %{
 2137     // No relocation needed
 2138     MacroAssembler _masm(&cbuf);
 2139     __ mov64(r10, (int64_t) $meth$$method);
 2140     __ call(r10);

 2141   %}
 2142 
 2143   enc_class Java_To_Interpreter(method meth)
 2144   %{
 2145     // CALL Java_To_Interpreter
 2146     // This is the instruction starting address for relocation info.
 2147     cbuf.set_insts_mark();
 2148     $$$emit8$primary;
 2149     // CALL directly to the runtime
 2150     emit_d32_reloc(cbuf,
 2151                    (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2152                    runtime_call_Relocation::spec(),
 2153                    RELOC_DISP32);

 2154   %}
 2155 
 2156   enc_class Java_Static_Call(method meth)
 2157   %{
 2158     // JAVA STATIC CALL
 2159     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to
 2160     // determine who we intended to call.

 2161     cbuf.set_insts_mark();
 2162     $$$emit8$primary;
 2163 
 2164     if (!_method) {
 2165       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2166                      runtime_call_Relocation::spec(),
 2167                      RELOC_DISP32);
 2168     } else {
 2169       int method_index = resolved_method_index(cbuf);
 2170       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
 2171                                                   : static_call_Relocation::spec(method_index);
 2172       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2173                      rspec, RELOC_DISP32);
 2174       // Emit stubs for static call.
 2175       address mark = cbuf.insts_mark();
 2176       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
 2177       if (stub == NULL) {
 2178         ciEnv::current()->record_failure("CodeCache is full");
 2179         return;
 2180       }
 2181     }

 2182   %}
 2183 
 2184   enc_class Java_Dynamic_Call(method meth) %{
 2185     MacroAssembler _masm(&cbuf);
 2186     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));

 2187   %}
 2188 
 2189   enc_class Java_Compiled_Call(method meth)
 2190   %{
 2191     // JAVA COMPILED CALL
 2192     int disp = in_bytes(Method::from_compiled_offset());
 2193 
 2194     // XXX offset is 128 in 1.5 NON-PRODUCT builds, so the disp8 assert below is disabled
 2195     // assert(-0x80 <= disp && disp < 0x80, "compiled_code_offset isn't small");
 2196 
 2197     // callq *disp(%rax)

 2198     cbuf.set_insts_mark();
 2199     $$$emit8$primary;
 2200     if (disp < 0x80) {
 2201       emit_rm(cbuf, 0x01, $secondary, RAX_enc); // R/M byte
 2202       emit_d8(cbuf, disp); // Displacement
 2203     } else {
 2204       emit_rm(cbuf, 0x02, $secondary, RAX_enc); // R/M byte
 2205       emit_d32(cbuf, disp); // Displacement
 2206     }

 2207   %}
 2208 
 2209   enc_class reg_opc_imm(rRegI dst, immI8 shift)
 2210   %{
 2211     // SAL, SAR, SHR
 2212     int dstenc = $dst$$reg;
 2213     if (dstenc >= 8) {
 2214       emit_opcode(cbuf, Assembler::REX_B);
 2215       dstenc -= 8;
 2216     }
 2217     $$$emit8$primary;
 2218     emit_rm(cbuf, 0x3, $secondary, dstenc);
 2219     $$$emit8$shift$$constant;
 2220   %}
 2221 
 2222   enc_class reg_opc_imm_wide(rRegL dst, immI8 shift)
 2223   %{
 2224     // SAL, SAR, SHR
 2225     int dstenc = $dst$$reg;
 2226     if (dstenc < 8) {

 2121   %}
 2122 
 2123   enc_class clear_avx %{
 2124     debug_only(int off0 = cbuf.insts_size());
 2125     if (generate_vzeroupper(Compile::current())) {
 2126       // Clear the upper bits of the YMM registers when the current compiled
 2127       // code uses wide vectors, to avoid the AVX <-> SSE transition penalty
 2128       // during the call.
 2129       MacroAssembler _masm(&cbuf);
 2130       __ vzeroupper();
 2131     }
 2132     debug_only(int off1 = cbuf.insts_size());
 2133     assert(off1 - off0 == clear_avx_size(), "correct size prediction");
 2134   %}
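
The debug_only(off0)/assert pattern above checks that the bytes actually emitted agree with the size predicted by a clear_avx_size() helper defined elsewhere in this file. Below is a minimal self-contained sketch of that size-prediction pattern, assuming a toy buffer type and the 3-byte VZEROUPPER encoding (C5 F8 77); the names are illustrative, not the HotSpot API.

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // Toy code buffer standing in for cbuf; only insts_size() matters here.
  struct ToyBuffer {
    std::vector<uint8_t> bytes;
    size_t insts_size() const { return bytes.size(); }
    void emit(uint8_t b) { bytes.push_back(b); }
  };

  static bool need_vzeroupper = true;                              // stand-in for generate_vzeroupper(...)
  static int  clear_avx_size() { return need_vzeroupper ? 3 : 0; } // VZEROUPPER is C5 F8 77

  void emit_clear_avx(ToyBuffer& buf) {
    size_t off0 = buf.insts_size();
    if (need_vzeroupper) {
      buf.emit(0xC5); buf.emit(0xF8); buf.emit(0x77);              // vzeroupper
    }
    size_t off1 = buf.insts_size();
    assert(off1 - off0 == (size_t)clear_avx_size() && "correct size prediction");
  }
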
 2135 
 2136   enc_class Java_To_Runtime(method meth) %{
 2137     // No relocation needed
 2138     MacroAssembler _masm(&cbuf);
 2139     __ mov64(r10, (int64_t) $meth$$method);
 2140     __ call(r10);
 2141     __ post_call_nop();
 2142   %}
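
Java_To_Runtime loads the absolute 64-bit target into r10 and calls through the register instead of emitting a direct call rel32, because a runtime entry point is not guaranteed to lie within +/-2 GiB of the code buffer; an indirect call reaches any address, and since the absolute immediate does not depend on where the code itself ends up, no relocation is needed. A small sketch of the reachability condition a rel32 call would require (illustrative helper, not HotSpot code):

  #include <cstdint>

  // Can a direct `call rel32` (E8 xx xx xx xx, 5 bytes) placed at insn_addr
  // reach target?  The displacement is relative to the end of the instruction
  // and must fit in a signed 32-bit value.
  bool reachable_by_call_rel32(uint64_t insn_addr, uint64_t target) {
    int64_t disp = (int64_t)target - (int64_t)(insn_addr + 5);
    return disp == (int64_t)(int32_t)disp;
  }
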
 2143 
 2144   enc_class Java_To_Interpreter(method meth)
 2145   %{
 2146     // CALL Java_To_Interpreter
 2147     // This is the instruction starting address for relocation info.
 2148     cbuf.set_insts_mark();
 2149     $$$emit8$primary;
 2150     // CALL directly to the runtime
 2151     emit_d32_reloc(cbuf,
 2152                    (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2153                    runtime_call_Relocation::spec(),
 2154                    RELOC_DISP32);
 2155     __ post_call_nop();
 2156   %}
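
The "- 4" in the displacement computation reflects that, once $$$emit8$primary has emitted the opcode byte, cbuf.insts_end() points at the start of the 4-byte displacement field, while the hardware resolves the target relative to the end of that field. A toy version of the same arithmetic, assuming a raw byte pointer in place of the code buffer:

  #include <cstdint>
  #include <cstring>

  // `p` points just past the 0xE8 opcode byte; the rel32 field is relative
  // to p + 4, i.e. to the end of the call instruction.
  void emit_call_disp32(uint8_t*& p, uint64_t target) {
    int32_t disp = (int32_t)((int64_t)target - ((int64_t)(uintptr_t)p + 4));
    std::memcpy(p, &disp, sizeof(disp));  // x86 is little-endian, so a plain store works
    p += 4;
  }
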
 2157 
 2158   enc_class Java_Static_Call(method meth)
 2159   %{
 2160     // JAVA STATIC CALL
 2161     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to
 2162     // determine who we intended to call.
 2163     MacroAssembler _masm(&cbuf);
 2164     cbuf.set_insts_mark();
 2165     $$$emit8$primary;
 2166 
 2167     if (!_method) {
 2168       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2169                      runtime_call_Relocation::spec(),
 2170                      RELOC_DISP32);
 2171     } else {
 2172       int method_index = resolved_method_index(cbuf);
 2173       RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
 2174                                                   : static_call_Relocation::spec(method_index);
 2175       emit_d32_reloc(cbuf, (int) ($meth$$method - ((intptr_t) cbuf.insts_end()) - 4),
 2176                      rspec, RELOC_DISP32);
 2177       // Emit stubs for static call.
 2178       address mark = cbuf.insts_mark();
 2179       address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, mark);
 2180       if (stub == NULL) {
 2181         ciEnv::current()->record_failure("CodeCache is full");
 2182         return;
 2183       }
 2184     }
 2185     __ post_call_nop();
 2186   %}
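
For a resolvable Java target, the encoding above does two extra things: it records a call-type-specific relocation for the 32-bit field (so the runtime can later find and patch the call when it is resolved or deoptimized), and it emits a companion to-interpreter stub, bailing out cleanly when the code cache is full. The sketch below only illustrates the first part, pairing an emitted rel32 field with a relocation record; the types are toys, not the HotSpot relocInfo API.

  #include <cstdint>
  #include <vector>

  enum class RelocKind { runtime_call, static_call, opt_virtual_call };

  struct RelocEntry {
    size_t    offset;   // where the 4-byte field lives in the code buffer
    RelocKind kind;
    uint64_t  target;   // what it pointed at when emitted
  };

  struct ToyCodeBuffer {
    std::vector<uint8_t>    code;
    std::vector<RelocEntry> relocs;

    // Write a rel32 field and remember it so it can be found and patched later.
    void emit_d32_reloc(uint64_t target, RelocKind kind, uint64_t buffer_base) {
      relocs.push_back({code.size(), kind, target});
      int64_t end_of_field = (int64_t)buffer_base + (int64_t)code.size() + 4;
      int32_t disp = (int32_t)((int64_t)target - end_of_field);
      for (int i = 0; i < 4; i++) code.push_back((uint8_t)(disp >> (8 * i)));
    }
  };
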
 2187 
 2188   enc_class Java_Dynamic_Call(method meth) %{
 2189     MacroAssembler _masm(&cbuf);
 2190     __ ic_call((address)$meth$$method, resolved_method_index(cbuf));
 2191     __ post_call_nop();
 2192   %}
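
ic_call emits an inline-cache call: the call site carries a cached expectation about the receiver that is checked before the compiled body runs, and it is re-resolved and patched on a miss. The snippet below is only a language-level analogy of that monomorphic-cache idea, not the machine code ic_call produces or the HotSpot CompiledIC machinery.

  // Conceptual analogy of a monomorphic inline cache: remember the last
  // receiver class and its resolved entry; hit -> direct call, miss -> re-resolve.
  struct Klass;
  struct Receiver { const Klass* klass; };
  using Entry    = void (*)(Receiver*);
  using Resolver = Entry (*)(Receiver*);

  struct InlineCache {
    const Klass* cached_klass = nullptr;
    Entry        cached_entry = nullptr;

    void call(Receiver* r, Resolver resolve) {
      if (r->klass != cached_klass) {   // miss: resolve again and patch the cache
        cached_entry = resolve(r);
        cached_klass = r->klass;
      }
      cached_entry(r);                  // hit: straight to the cached target
    }
  };
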
 2193 
 2194   enc_class Java_Compiled_Call(method meth)
 2195   %{
 2196     // JAVA COMPILED CALL
 2197     int disp = in_bytes(Method::from_compiled_offset());
 2198 
 2199     // XXX offset is 128 in 1.5 NON-PRODUCT builds, so the disp8 assert below is disabled
 2200     // assert(-0x80 <= disp && disp < 0x80, "compiled_code_offset isn't small");
 2201 
 2202     // callq *disp(%rax)
 2203     MacroAssembler _masm(&cbuf);
 2204     cbuf.set_insts_mark();
 2205     $$$emit8$primary;
 2206     if (disp < 0x80) {
 2207       emit_rm(cbuf, 0x01, $secondary, RAX_enc); // R/M byte
 2208       emit_d8(cbuf, disp); // Displacement
 2209     } else {
 2210       emit_rm(cbuf, 0x02, $secondary, RAX_enc); // R/M byte
 2211       emit_d32(cbuf, disp); // Displacement
 2212     }
 2213     __ post_call_nop();
 2214   %}
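
The branch above picks between the two ModRM addressing forms for callq *disp(%rax): mod=01 with an 8-bit displacement when disp fits, mod=10 with a 32-bit displacement otherwise (disp is a non-negative field offset, so testing disp < 0x80 is enough). A standalone sketch of the same byte-level choice, assuming a plain byte vector as the buffer:

  #include <cstdint>
  #include <vector>

  // callq *disp(%rax): opcode FF /2, then ModRM + displacement.
  void emit_call_rax_disp(std::vector<uint8_t>& code, int disp) {
    code.push_back(0xFF);                  // CALL r/m64
    if (disp < 0x80) {
      code.push_back(0x50);                // ModRM: mod=01, reg=/2, rm=rax
      code.push_back((uint8_t)disp);       // disp8
    } else {
      code.push_back(0x90);                // ModRM: mod=10, reg=/2, rm=rax
      for (int i = 0; i < 4; i++) code.push_back((uint8_t)(disp >> (8 * i)));  // disp32
    }
  }
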
 2215 
 2216   enc_class reg_opc_imm(rRegI dst, immI8 shift)
 2217   %{
 2218     // SAL, SAR, SHR
 2219     int dstenc = $dst$$reg;
 2220     if (dstenc >= 8) {
 2221       emit_opcode(cbuf, Assembler::REX_B);
 2222       dstenc -= 8;
 2223     }
 2224     $$$emit8$primary;
 2225     emit_rm(cbuf, 0x3, $secondary, dstenc);
 2226     $$$emit8$shift$$constant;
 2227   %}
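
reg_opc_imm shows the usual pattern for shift-by-immediate forms: registers r8-r15 need a REX.B prefix and have 8 subtracted from their encoding before it goes into the ModRM byte. Below is a standalone sketch of that pattern, using SHL r/m32, imm8 (opcode C1 /4) as one concrete instance of the SAL/SAR/SHR group this enc_class covers.

  #include <cstdint>
  #include <vector>

  // SHL r32, imm8 with the same REX.B handling as reg_opc_imm.
  void emit_shl_imm8(std::vector<uint8_t>& code, int reg_enc, uint8_t shift) {
    if (reg_enc >= 8) {
      code.push_back(0x41);                // REX.B selects r8d..r15d
      reg_enc -= 8;
    }
    code.push_back(0xC1);                  // shift-group opcode, imm8 form
    code.push_back((uint8_t)(0xC0 | (4 << 3) | reg_enc));  // ModRM: mod=11, reg=/4 (SHL), rm=dst
    code.push_back(shift);                 // the shift count
  }
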
 2228 
 2229   enc_class reg_opc_imm_wide(rRegL dst, immI8 shift)
 2230   %{
 2231     // SAL, SAR, SHR
 2232     int dstenc = $dst$$reg;
 2233     if (dstenc < 8) {