| 1 | /* CpuArch.c -- CPU specific code\r |
| 2 | 2016-02-25: Igor Pavlov : Public domain */\r |
| 3 | \r |
| 4 | #include "Precomp.h"\r |
| 5 | \r |
| 6 | #include "CpuArch.h"\r |
| 7 | \r |
| 8 | #ifdef MY_CPU_X86_OR_AMD64\r |
| 9 | \r |
| 10 | #if (defined(_MSC_VER) && !defined(MY_CPU_AMD64)) || defined(__GNUC__)\r |
| 11 | #define USE_ASM\r |
| 12 | #endif\r |
| 13 | \r |
| 14 | #if !defined(USE_ASM) && _MSC_VER >= 1500\r |
| 15 | #include <intrin.h>\r |
| 16 | #endif\r |
| 17 | \r |
| 18 | #if defined(USE_ASM) && !defined(MY_CPU_AMD64)\r |
/* Tests whether the given EFLAGS bit can be toggled by software.
   Saves EFLAGS, flips the bit(s) in (flag), writes the modified value
   back, re-reads EFLAGS and keeps only the bits that actually changed;
   the original EFLAGS value is restored before returning.
   Returns non-zero if the bit is writable, 0 otherwise.
   Used below with bit 18 (AC) and bit 21 (ID) to detect 486+ and
   CPUID support (see CHECK_CPUID_IS_SUPPORTED). */
static UInt32 CheckFlag(UInt32 flag)
{
#ifdef _MSC_VER
  __asm pushfd;             /* EAX = current EFLAGS */
  __asm pop EAX;
  __asm mov EDX, EAX;       /* EDX = saved original EFLAGS */
  __asm xor EAX, flag;      /* toggle the tested bit(s) */
  __asm push EAX;
  __asm popfd;              /* attempt to write modified EFLAGS */
  __asm pushfd;             /* re-read EFLAGS to see what stuck */
  __asm pop EAX;
  __asm xor EAX, EDX;       /* EAX = bits that really changed */
  __asm push EDX;
  __asm popfd;              /* restore original EFLAGS */
  __asm and flag, EAX;      /* flag = 0 if the bit could not be toggled */
#else
  /* Same algorithm for GCC-style asm; flag is both input and output
     in ECX ("c"), EAX/EDX are declared clobbered. */
  __asm__ __volatile__ (
    "pushf\n\t"
    "pop %%EAX\n\t"
    "movl %%EAX,%%EDX\n\t"
    "xorl %0,%%EAX\n\t"
    "push %%EAX\n\t"
    "popf\n\t"
    "pushf\n\t"
    "pop %%EAX\n\t"
    "xorl %%EDX,%%EAX\n\t"
    "push %%EDX\n\t"
    "popf\n\t"
    "andl %%EAX, %0\n\t":
    "=c" (flag) : "c" (flag) :
    "%eax", "%edx");
#endif
  return flag;
}
| 53 | #define CHECK_CPUID_IS_SUPPORTED if (CheckFlag(1 << 18) == 0 || CheckFlag(1 << 21) == 0) return False;\r |
| 54 | #else\r |
| 55 | #define CHECK_CPUID_IS_SUPPORTED\r |
| 56 | #endif\r |
| 57 | \r |
/* Executes the CPUID instruction for leaf (function) and stores the
   resulting EAX/EBX/ECX/EDX into *a / *b / *c / *d respectively.
   Caller must ensure CPUID is supported (CHECK_CPUID_IS_SUPPORTED). */
void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d)
{
#ifdef USE_ASM

#ifdef _MSC_VER

  /* x86 MSVC inline asm: results are staged in locals because __asm
     cannot store through the pointer parameters directly.
     ECX is zeroed so sub-leaf-sensitive functions get sub-leaf 0. */
  UInt32 a2, b2, c2, d2;
  __asm xor EBX, EBX;
  __asm xor ECX, ECX;
  __asm xor EDX, EDX;
  __asm mov EAX, function;
  __asm cpuid;
  __asm mov a2, EAX;
  __asm mov b2, EBX;
  __asm mov c2, ECX;
  __asm mov d2, EDX;

  *a = a2;
  *b = b2;
  *c = c2;
  *d = d2;

#else

  /* GCC/Clang: under PIC, EBX (x86) / RBX (x64) holds the GOT pointer
     and must not be clobbered, so CPUID's EBX output is bounced through
     EDI/RDI ("=D") instead of being listed as "=b". */
  __asm__ __volatile__ (
#if defined(MY_CPU_AMD64) && defined(__PIC__)
    "mov %%rbx, %%rdi;"
    "cpuid;"
    "xchg %%rbx, %%rdi;"
    : "=a" (*a) ,
    "=D" (*b) ,
#elif defined(MY_CPU_X86) && defined(__PIC__)
    "mov %%ebx, %%edi;"
    "cpuid;"
    "xchgl %%ebx, %%edi;"
    : "=a" (*a) ,
    "=D" (*b) ,
#else
    "cpuid"
    : "=a" (*a) ,
    "=b" (*b) ,
#endif
    "=c" (*c) ,
    "=d" (*d)
    : "0" (function)) ;

#endif

#else

  /* No inline asm available (e.g. x64 MSVC >= 1500): use the intrinsic. */
  int CPUInfo[4];
  __cpuid(CPUInfo, function);
  *a = CPUInfo[0];
  *b = CPUInfo[1];
  *c = CPUInfo[2];
  *d = CPUInfo[3];

#endif
}
| 117 | \r |
/* Reads basic CPUID information into *p.
   Returns False if CPUID is unavailable (EFLAGS AC/ID bits not
   writable); otherwise fills maxFunc and the 12-byte vendor id, plus
   the leaf-1 registers (ver = EAX, and raw b/c/d), and returns True. */
Bool x86cpuid_CheckAndRead(Cx86cpuid *p)
{
  CHECK_CPUID_IS_SUPPORTED
  /* Leaf 0: note the 0,2,1 destination order — vendor[] receives
     EBX, EDX, ECX, which yields the readable order ("Genu","ineI","ntel"). */
  MyCPUID(0, &p->maxFunc, &p->vendor[0], &p->vendor[2], &p->vendor[1]);
  /* Leaf 1: version / feature information. */
  MyCPUID(1, &p->ver, &p->b, &p->c, &p->d);
  return True;
}
| 125 | \r |
/* CPUID vendor-id strings as three little-endian 32-bit words in the
   EBX, EDX, ECX order produced by x86cpuid_CheckAndRead; row index is
   the value returned by x86cpuid_GetFirm. */
static const UInt32 kVendors[][3] =
{
  { 0x756E6547, 0x49656E69, 0x6C65746E},  /* "GenuineIntel" */
  { 0x68747541, 0x69746E65, 0x444D4163},  /* "AuthenticAMD" */
  { 0x746E6543, 0x48727561, 0x736C7561}   /* "CentaurHauls" (VIA) */
};
| 132 | \r |
| 133 | int x86cpuid_GetFirm(const Cx86cpuid *p)\r |
| 134 | {\r |
| 135 | unsigned i;\r |
| 136 | for (i = 0; i < sizeof(kVendors) / sizeof(kVendors[i]); i++)\r |
| 137 | {\r |
| 138 | const UInt32 *v = kVendors[i];\r |
| 139 | if (v[0] == p->vendor[0] &&\r |
| 140 | v[1] == p->vendor[1] &&\r |
| 141 | v[2] == p->vendor[2])\r |
| 142 | return (int)i;\r |
| 143 | }\r |
| 144 | return -1;\r |
| 145 | }\r |
| 146 | \r |
| 147 | Bool CPU_Is_InOrder()\r |
| 148 | {\r |
| 149 | Cx86cpuid p;\r |
| 150 | int firm;\r |
| 151 | UInt32 family, model;\r |
| 152 | if (!x86cpuid_CheckAndRead(&p))\r |
| 153 | return True;\r |
| 154 | \r |
| 155 | family = x86cpuid_GetFamily(p.ver);\r |
| 156 | model = x86cpuid_GetModel(p.ver);\r |
| 157 | \r |
| 158 | firm = x86cpuid_GetFirm(&p);\r |
| 159 | \r |
| 160 | switch (firm)\r |
| 161 | {\r |
| 162 | case CPU_FIRM_INTEL: return (family < 6 || (family == 6 && (\r |
| 163 | /* In-Order Atom CPU */\r |
| 164 | model == 0x1C /* 45 nm, N4xx, D4xx, N5xx, D5xx, 230, 330 */\r |
| 165 | || model == 0x26 /* 45 nm, Z6xx */\r |
| 166 | || model == 0x27 /* 32 nm, Z2460 */\r |
| 167 | || model == 0x35 /* 32 nm, Z2760 */\r |
| 168 | || model == 0x36 /* 32 nm, N2xxx, D2xxx */\r |
| 169 | )));\r |
| 170 | case CPU_FIRM_AMD: return (family < 5 || (family == 5 && (model < 6 || model == 0xA)));\r |
| 171 | case CPU_FIRM_VIA: return (family < 6 || (family == 6 && model < 0xF));\r |
| 172 | }\r |
| 173 | return True;\r |
| 174 | }\r |
| 175 | \r |
| 176 | #if !defined(MY_CPU_AMD64) && defined(_WIN32)\r |
| 177 | #include <windows.h>\r |
/* Reports whether the running Windows version is new enough for SSE
   use (dwMajorVersion >= 5, i.e. Windows 2000 or later).
   For VS2015+ app-partition (UWP) builds the answer is always True,
   since GetVersionEx is not available there.
   NOTE(review): GetVersionEx is deprecated and may report a capped
   version under compatibility shims on Windows 8.1+; harmless here
   because only ">= 5" is tested. */
static Bool CPU_Sys_Is_SSE_Supported()
{
#if _MSC_VER >= 1900 && WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)
  return True;
#else
  OSVERSIONINFO vi;
  vi.dwOSVersionInfoSize = sizeof(vi);
  if (!GetVersionEx(&vi))
    return False;
  return (vi.dwMajorVersion >= 5);
#endif
}
| 190 | #define CHECK_SYS_SSE_SUPPORT if (!CPU_Sys_Is_SSE_Supported()) return False;\r |
| 191 | #else\r |
| 192 | #define CHECK_SYS_SSE_SUPPORT\r |
| 193 | #endif\r |
| 194 | \r |
| 195 | Bool CPU_Is_Aes_Supported()\r |
| 196 | {\r |
| 197 | Cx86cpuid p;\r |
| 198 | CHECK_SYS_SSE_SUPPORT\r |
| 199 | if (!x86cpuid_CheckAndRead(&p))\r |
| 200 | return False;\r |
| 201 | return (p.c >> 25) & 1;\r |
| 202 | }\r |
| 203 | \r |
| 204 | #endif\r |