#ifndef OJPH_DISABLE_INTEL_SIMD
  // Executes the CPUID instruction for the given leaf (eax) and subleaf
  // (ecx) and stores the resulting eax, ebx, ecx, edx values in abcd[0..3].
  bool run_cpuid(uint32_t eax, uint32_t ecx, uint32_t* abcd)
  {
#ifdef OJPH_COMPILER_MSVC
    __cpuidex((int *)abcd, eax, ecx);
#else
    uint32_t ebx = 0, edx = 0;
  #if defined( __i386__ ) && defined ( __PIC__ )
    // 32-bit PIC code keeps the GOT pointer in ebx, so preserve it in edi
    // around the cpuid instruction instead of clobbering it.
    __asm__ ( "movl %%ebx, %%edi \n\t cpuid \n\t xchgl %%ebx, %%edi"
              : "=D" (ebx), "+a" (eax), "+c" (ecx), "=d" (edx) );
  #else
    __asm__ ( "cpuid"
              : "+b" (ebx), "+a" (eax), "+c" (ecx), "=d" (edx) );
  #endif
    abcd[0] = eax; abcd[1] = ebx; abcd[2] = ecx; abcd[3] = edx;
#endif
    return true;
  }
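  // Illustrative sketch, not part of the original file: it shows how the
  // abcd[] layout produced by run_cpuid maps onto the CPUID registers.
  // Leaf 0 returns the highest supported standard leaf in eax, which should
  // be checked before querying higher leaves such as leaf 7 further below.
  static uint32_t max_standard_cpuid_leaf()
  {
    uint32_t regs[4];       // regs[0]=eax, regs[1]=ebx, regs[2]=ecx, regs[3]=edx
    run_cpuid(0, 0, regs);  // leaf 0, subleaf 0
    return regs[0];
  }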
  // Reads the extended control register selected by index.  XCR0 (index 0)
  // reports which register states the OS saves and restores, which must be
  // checked before trusting the AVX/AVX-512 CPUID bits.
  uint64_t read_xcr(uint32_t index)
  {
#ifdef OJPH_COMPILER_MSVC
    return _xgetbv(index);
#else
    uint32_t eax = 0, edx = 0;
    __asm__ ( "xgetbv" : "=a" (eax), "=d" (edx) : "c" (index) );
    return ((uint64_t)edx << 32) | eax;
#endif
  }
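  // Illustrative sketch, not part of the original file: a feature is only
  // usable when the CPU reports it *and* the OS saves the registers it
  // touches; XCR0 bit 1 covers xmm state, bit 2 ymm, and bits 5-7
  // opmask/zmm.  Call this only after confirming OSXSAVE (CPUID leaf 1,
  // ecx bit 27), since xgetbv faults on processors that lack it.
  static bool os_saves_state(uint64_t required_xcr0_bits)
  {
    return (read_xcr(0) & required_xcr0_bits) == required_xcr0_bits;
  }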
  // Queries CPUID to find which SIMD extensions both the processor and the
  // operating system support, and records the newest usable one in level.
  // The numeric tiers assigned below are an assumption for illustration;
  // the only requirement is that newer extensions map to larger values.
  bool init_cpu_ext_level(int &level)
  {
    level = 0;
    uint32_t mmx_abcd[4];
    run_cpuid(1, 0, mmx_abcd);  // leaf 1: standard feature flags
    bool mmx_avail = ((mmx_abcd[3] & 0x00800000) == 0x00800000);   // edx bit 23
    if (mmx_avail) level = 1;
    bool sse_avail = ((mmx_abcd[3] & 0x02000000) == 0x02000000);   // edx bit 25
    if (sse_avail) level = 2;
    bool sse2_avail = ((mmx_abcd[3] & 0x04000000) == 0x04000000);  // edx bit 26
    if (sse2_avail) level = 3;
    bool sse3_avail = ((mmx_abcd[2] & 0x00000001) == 0x00000001);  // ecx bit 0
    if (sse3_avail) level = 4;
    bool ssse3_avail = ((mmx_abcd[2] & 0x00000200) == 0x00000200); // ecx bit 9
    if (ssse3_avail) level = 5;
    bool sse41_avail = ((mmx_abcd[2] & 0x00080000) == 0x00080000); // ecx bit 19
    bool sse42_avail = ((mmx_abcd[2] & 0x00100000) == 0x00100000); // ecx bit 20
    if (sse41_avail && sse42_avail)
    {
      level = 6;
      uint64_t xcr_val = 0;
      bool osxsave_avail, ymm_avail, avx_avail = false;
      // OSXSAVE (ecx bit 27) means xgetbv is usable, so we can ask whether
      // the OS saves the wider ymm/zmm register state on context switches.
      osxsave_avail = ((mmx_abcd[2] & 0x08000000) == 0x08000000);
      if (osxsave_avail)
        xcr_val = read_xcr(0);
      ymm_avail = osxsave_avail && ((xcr_val & 0x6) == 0x6);  // xmm+ymm saved
      avx_avail = ymm_avail && ((mmx_abcd[2] & 0x10000000) != 0); // ecx bit 28
      if (avx_avail)
      {
        level = 7;
        uint32_t avx2_abcd[4];
        run_cpuid(7, 0, avx2_abcd);  // leaf 7, subleaf 0: extended features
        bool avx2_avail = (avx2_abcd[1] & 0x20) != 0;            // ebx bit 5
        if (avx2_avail) level = 8;
        bool avx2fma_avail =
          avx2_avail && ((mmx_abcd[2] & 0x1000) == 0x1000);      // FMA, ecx bit 12
        if (avx2fma_avail) level = 9;
        bool zmm_avail =  // xcr0 bits 1,2,5,6,7: xmm/ymm/opmask/zmm state saved
          osxsave_avail && ((xcr_val & 0xE6) == 0xE6);
        bool avx512vl_avail = (avx2_abcd[1] & 0x80000000) != 0;  // ebx bit 31
        bool avx512_avail = zmm_avail && avx512vl_avail;
        if (avx512_avail) level = 10;
      }
    }
    return true;
  }
  // Set to true once the extension level has been determined, so the CPUID
  // queries above only need to run the first time the level is requested.
  static bool cpu_level_initialized = false;
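  // Minimal sketch of how these pieces are typically tied together, assuming
  // a lazily initialized cache; get_cpu_ext_level and cpu_ext_level are
  // names introduced here for illustration and need not match the library.
  static int cpu_ext_level = 0;

  int get_cpu_ext_level()
  {
    if (!cpu_level_initialized)
      cpu_level_initialized = init_cpu_ext_level(cpu_ext_level);
    return cpu_ext_level;
  }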