    FLAG_SET_DEFAULT(UseFastStosb, false);
  }

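  // UseFastStosb selects the ERMS "rep stosb" fast-string path for zeroing
  // newly allocated objects. The blocks below turn it off by default on
  // processors where wide SIMD stores are the better choice.
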
  // For AMD Processors use XMM/YMM MOVDQU instructions
  // for Object Initialization as default
  if (is_amd() && cpu_family() >= 0x19) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }

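  // Likewise on Intel: when C2 may use vectors wider than 16 bytes (AVX2
  // and up), SIMD-based initialization is preferred over rep stosb.
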
#ifdef COMPILER2
  if (is_intel() && MaxVectorSize > 16) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }
#endif

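  // With the stosb path off and unaligned load/stores available, default to
  // MOVDQU-based initialization; the two mechanisms are mutually exclusive.
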
  // Use XMM/YMM MOVDQU instruction for Object Initialization
  if (!UseFastStosb && UseUnalignedLoadStores) {
    if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
      UseXMMForObjInit = true;
    }
  } else if (UseXMMForObjInit) {
    warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
    FLAG_SET_DEFAULT(UseXMMForObjInit, false);
  }

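  // AlignVector makes C2's auto-vectorizer align vector loads and stores; it
  // is only needed when misaligned vector memory access is slow or unsupported.
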
#ifdef COMPILER2
  if (FLAG_IS_DEFAULT(AlignVector)) {
    // Modern processors allow misaligned memory operations for vectors.
    AlignVector = !UseUnalignedLoadStores;
  }
#endif // COMPILER2

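  // AllocatePrefetchInstr selects the prefetch instruction used ahead of the
  // allocation pointer; the value 3 (prefetchw) requires 3DNow!-style
  // prefetch support, and the plain SSE prefetch variants require SSE.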
  if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
    FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
  } else if (!supports_sse() && supports_3dnow_prefetch()) {
    FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);
  }
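
  // Example (illustrative): the effective defaults chosen above can be
  // inspected on a given machine with
  //   java -XX:+PrintFlagsFinal -version | \
  //     grep -E 'UseFastStosb|UseXMMForObjInit|AlignVector|AllocatePrefetchInstr'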