2226
// NOTE(review): the declarations below sit inside a class whose opening brace
// is outside this chunk (the include guard suggests x86 MacroAssembler).
// All comments here are inferred from names/signatures -- confirm against the
// corresponding .cpp definitions.
// Write back the cache line containing `line`; presumably emits a
// CLWB/CLFLUSHOPT-class instruction -- TODO confirm in the implementation.
2227 void cache_wb(Address line);
// Ordering fence around cache write-backs; `is_pre` apparently selects the
// pre- vs post-writeback variant -- semantics not visible here, verify.
2228 void cache_wbsync(bool is_pre);
2229
2230 #ifdef COMPILER2_OR_JVMCI
// Block-fill generator using AVX3/AVX-512 (name-based) -- C2/JVMCI builds only.
// `xtmp` is a vector scratch register, `rtmp` a GPR scratch.
2231 void generate_fill_avx3(BasicType type, Register to, Register value,
2232 Register count, Register rtmp, XMMRegister xtmp);
2233 #endif // COMPILER2_OR_JVMCI
2234 #endif // _LP64
2235
// Set every bit of vector register `dst` to 1 for the given vector length --
// inferred from the "all ones" name; verify.
2236 void vallones(XMMRegister dst, int vector_len);
2237
// Debug-style check that `sp` is suitably aligned; `msg` is presumably
// reported on failure, `bias` adjusts the value checked -- confirm exact use.
2238 void check_stack_alignment(Register sp, const char* msg, unsigned bias = 0, Register tmp = noreg);
2239
// Fast-path (lightweight) object lock/unlock; jumps to `slow` when the fast
// path cannot be taken. reg_rax is a required scratch (likely for cmpxchg) --
// NOTE(review): inferred from names, confirm against callers.
2240 void lightweight_lock(Register basic_lock, Register obj, Register reg_rax, Register thread, Register tmp, Label& slow);
2241 void lightweight_unlock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow);
2242
2243 #ifdef _LP64
// Save/restore the legacy (pre-APX, presumably) general-purpose registers;
// 64-bit builds only -- exact register set not visible here.
2244 void save_legacy_gprs();
2245 void restore_legacy_gprs();
// Materialize a condition into `dst` (SETcc-style 0/1 result, by name).
2246 void setcc(Assembler::Condition comparison, Register dst);
2247 #endif
2248 };
2249
2250 #endif // CPU_X86_MACROASSEMBLER_X86_HPP
|
2226
// NOTE(review): the declarations below sit inside a class whose opening brace
// is outside this chunk (the include guard suggests x86 MacroAssembler).
// All comments here are inferred from names/signatures -- confirm against the
// corresponding .cpp definitions.
// Write back the cache line containing `line`; presumably emits a
// CLWB/CLFLUSHOPT-class instruction -- TODO confirm in the implementation.
2227 void cache_wb(Address line);
// Ordering fence around cache write-backs; `is_pre` apparently selects the
// pre- vs post-writeback variant -- semantics not visible here, verify.
2228 void cache_wbsync(bool is_pre);
2229
2230 #ifdef COMPILER2_OR_JVMCI
// Block-fill generator using AVX3/AVX-512 (name-based) -- C2/JVMCI builds only.
// `xtmp` is a vector scratch register, `rtmp` a GPR scratch.
2231 void generate_fill_avx3(BasicType type, Register to, Register value,
2232 Register count, Register rtmp, XMMRegister xtmp);
2233 #endif // COMPILER2_OR_JVMCI
2234 #endif // _LP64
2235
// Set every bit of vector register `dst` to 1 for the given vector length --
// inferred from the "all ones" name; verify.
2236 void vallones(XMMRegister dst, int vector_len);
2237
// Debug-style check that `sp` is suitably aligned; `msg` is presumably
// reported on failure, `bias` adjusts the value checked -- confirm exact use.
2238 void check_stack_alignment(Register sp, const char* msg, unsigned bias = 0, Register tmp = noreg);
2239
// Fast-path (lightweight) object lock/unlock; jumps to `slow` when the fast
// path cannot be taken. reg_rax is a required scratch (likely for cmpxchg) --
// NOTE(review): inferred from names, confirm against callers.
2240 void lightweight_lock(Register basic_lock, Register obj, Register reg_rax, Register thread, Register tmp, Label& slow);
2241 void lightweight_unlock(Register obj, Register reg_rax, Register thread, Register tmp, Label& slow);
2242
2243 #ifdef _LP64
// Save/restore the legacy (pre-APX, presumably) general-purpose registers;
// 64-bit builds only -- exact register set not visible here.
2244 void save_legacy_gprs();
2245 void restore_legacy_gprs();
// Load the address of an AOT runtime-constants entry into `reg` -- name-based
// ("aotrc" presumably = AOT runtime constants); confirm in the .cpp.
2246 void load_aotrc_address(Register reg, address a);
// Materialize a condition into `dst` (SETcc-style 0/1 result, by name).
2247 void setcc(Assembler::Condition comparison, Register dst);
2248 #endif
2249 };
2250
2251 #endif // CPU_X86_MACROASSEMBLER_X86_HPP
|