< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.hpp

Print this page

2220 
2221   void cache_wb(Address line);       // write back the cache line containing 'line' — presumably emits CLWB/CLFLUSHOPT; confirm in macroAssembler_x86.cpp
2222   void cache_wbsync(bool is_pre);    // ordering fence around cache write-backs; 'is_pre' selects pre- vs post-sync — verify fence choice (SFENCE?) in .cpp
2223 
2224 #ifdef COMPILER2_OR_JVMCI
2225   // Array/memory fill stub using AVX-512 ("AVX3") — only needed by the optimizing compilers.
2226   void generate_fill_avx3(BasicType type, Register to, Register value,
2226                           Register count, Register rtmp, XMMRegister xtmp);
2227 #endif // COMPILER2_OR_JVMCI
2228 #endif // _LP64
2229 
2230   void vallones(XMMRegister dst, int vector_len);  // set all bits of 'dst' to 1 for the given vector length — name-based reading; confirm against .cpp
2231 
2232   void check_stack_alignment(Register sp, const char* msg, unsigned bias = 0, Register tmp = noreg);  // debug check that sp (+bias) is aligned; 'msg' is the failure text
2233 
2234   // Fast-path lightweight (stack-) locking; branches to 'slow' when the fast path fails.
2234   void lightweight_lock(Register basic_lock, Register obj, Register reg_rax, Register tmp, Label& slow);
2235   void lightweight_unlock(Register obj, Register reg_rax, Register tmp, Label& slow);
2236 
2237 #ifdef _LP64
2238   void save_legacy_gprs();     // spill the 16 legacy GPRs — presumably around code that may clobber them (APX?); TODO confirm
2239   void restore_legacy_gprs();  // companion restore for save_legacy_gprs()
2240 
2240   void setcc(Assembler::Condition comparison, Register dst);  // materialize 'comparison' flags as 0/1 in 'dst' — verify widening behavior in .cpp
2241 #endif
2242 };
2243 
2244 #endif // CPU_X86_MACROASSEMBLER_X86_HPP

2220 
2221   void cache_wb(Address line);       // write back the cache line containing 'line' — presumably emits CLWB/CLFLUSHOPT; confirm in macroAssembler_x86.cpp
2222   void cache_wbsync(bool is_pre);    // ordering fence around cache write-backs; 'is_pre' selects pre- vs post-sync — verify fence choice (SFENCE?) in .cpp
2223 
2224 #ifdef COMPILER2_OR_JVMCI
2225   // Array/memory fill stub using AVX-512 ("AVX3") — only needed by the optimizing compilers.
2225   void generate_fill_avx3(BasicType type, Register to, Register value,
2226                           Register count, Register rtmp, XMMRegister xtmp);
2227 #endif // COMPILER2_OR_JVMCI
2228 #endif // _LP64
2229 
2230   void vallones(XMMRegister dst, int vector_len);  // set all bits of 'dst' to 1 for the given vector length — name-based reading; confirm against .cpp
2231 
2232   void check_stack_alignment(Register sp, const char* msg, unsigned bias = 0, Register tmp = noreg);  // debug check that sp (+bias) is aligned; 'msg' is the failure text
2233 
2234   // Fast-path lightweight (stack-) locking; branches to 'slow' when the fast path fails.
2234   void lightweight_lock(Register basic_lock, Register obj, Register reg_rax, Register tmp, Label& slow);
2235   void lightweight_unlock(Register obj, Register reg_rax, Register tmp, Label& slow);
2236 
2237 #ifdef _LP64
2238   void save_legacy_gprs();     // spill the 16 legacy GPRs — presumably around code that may clobber them (APX?); TODO confirm
2239   void restore_legacy_gprs();  // companion restore for save_legacy_gprs()
2240   void load_aotrc_address(Register reg, address a);  // NOTE(review): new in this change — looks like it loads an AOT runtime-constant address relocatably; confirm against callers
2241   void setcc(Assembler::Condition comparison, Register dst);  // materialize 'comparison' flags as 0/1 in 'dst' — verify widening behavior in .cpp
2242 #endif
2243 };
2244 
2245 #endif // CPU_X86_MACROASSEMBLER_X86_HPP
< prev index next >