    FLAG_SET_DEFAULT(UseFastStosb, false);
  }

  // For AMD Processors use XMM/YMM MOVDQU instructions
  // for Object Initialization as default
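  // (UseFastStosb enables the ERMS "rep stosb" zeroing path; family 0x19
  //  corresponds to Zen 3/Zen 4, where wide MOVDQU stores are preferred
  //  for object initialization by default.)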
  if (is_amd() && cpu_family() >= 0x19) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }

#ifdef COMPILER2
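  // Likewise prefer the SIMD path on Intel once AVX-sized vectors are in use
  // (MaxVectorSize > 16 means 32-byte YMM vectors or wider are available).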
  if (is_intel() && MaxVectorSize > 16) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }
#endif

  // Use XMM/YMM MOVDQU instruction for Object Initialization
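  // (Turned on by default only when the "rep stosb" path is off and the CPU
  //  handles unaligned SSE2 loads/stores efficiently.)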
  if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
    if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
      UseXMMForObjInit = true;
    }
  } else if (UseXMMForObjInit) {
    warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
    FLAG_SET_DEFAULT(UseXMMForObjInit, false);
  }

#ifdef COMPILER2
  if (FLAG_IS_DEFAULT(AlignVector)) {
    // Modern processors allow misaligned memory operations for vectors.
    AlignVector = !UseUnalignedLoadStores;
  }
#endif // COMPILER2
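
  // (AllocatePrefetchInstr selects the prefetch instruction used to prefetch
  //  ahead of the allocation pointer; value 3 is PREFETCHW, which requires
  //  3DNow!-style prefetch support, hence the supports_3dnow_prefetch() checks.)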
  if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
    if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
      FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
    } else if (!supports_sse() && supports_3dnow_prefetch()) {
      FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);