| 1 | /* CpuArch.h -- CPU specific code\r |
| 2 | 2016-06-09: Igor Pavlov : Public domain */\r |
| 3 | \r |
| 4 | #ifndef __CPU_ARCH_H\r |
| 5 | #define __CPU_ARCH_H\r |
| 6 | \r |
| 7 | #include "7zTypes.h"\r |
| 8 | \r |
| 9 | EXTERN_C_BEGIN\r |
| 10 | \r |
| 11 | /*\r |
| 12 | MY_CPU_LE means that CPU is LITTLE ENDIAN.\r |
| 13 | MY_CPU_BE means that CPU is BIG ENDIAN.\r |
| 14 | If MY_CPU_LE and MY_CPU_BE are not defined, we don't know about ENDIANNESS of platform.\r |
| 15 | \r |
| 16 | MY_CPU_LE_UNALIGN means that CPU is LITTLE ENDIAN and CPU supports unaligned memory accesses.\r |
| 17 | */\r |
| 18 | \r |
| 19 | #if defined(_M_X64) \\r |
| 20 | || defined(_M_AMD64) \\r |
| 21 | || defined(__x86_64__) \\r |
| 22 | || defined(__AMD64__) \\r |
| 23 | || defined(__amd64__)\r |
| 24 | #define MY_CPU_AMD64\r |
| 25 | #endif\r |
| 26 | \r |
| 27 | #if defined(MY_CPU_AMD64) \\r |
| 28 | || defined(_M_IA64) \\r |
| 29 | || defined(__AARCH64EL__) \\r |
| 30 | || defined(__AARCH64EB__)\r |
| 31 | #define MY_CPU_64BIT\r |
| 32 | #endif\r |
| 33 | \r |
| 34 | #if defined(_M_IX86) || defined(__i386__)\r |
| 35 | #define MY_CPU_X86\r |
| 36 | #endif\r |
| 37 | \r |
| 38 | #if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)\r |
| 39 | #define MY_CPU_X86_OR_AMD64\r |
| 40 | #endif\r |
| 41 | \r |
| 42 | #if defined(MY_CPU_X86) \\r |
| 43 | || defined(_M_ARM) \\r |
| 44 | || defined(__ARMEL__) \\r |
| 45 | || defined(__THUMBEL__) \\r |
| 46 | || defined(__ARMEB__) \\r |
| 47 | || defined(__THUMBEB__)\r |
| 48 | #define MY_CPU_32BIT\r |
| 49 | #endif\r |
| 50 | \r |
| 51 | #if defined(_WIN32) && defined(_M_ARM)\r |
| 52 | #define MY_CPU_ARM_LE\r |
| 53 | #endif\r |
| 54 | \r |
| 55 | #if defined(_WIN32) && defined(_M_IA64)\r |
| 56 | #define MY_CPU_IA64_LE\r |
| 57 | #endif\r |
| 58 | \r |
| 59 | #if defined(MY_CPU_X86_OR_AMD64) \\r |
| 60 | || defined(MY_CPU_ARM_LE) \\r |
| 61 | || defined(MY_CPU_IA64_LE) \\r |
| 62 | || defined(__LITTLE_ENDIAN__) \\r |
| 63 | || defined(__ARMEL__) \\r |
| 64 | || defined(__THUMBEL__) \\r |
| 65 | || defined(__AARCH64EL__) \\r |
| 66 | || defined(__MIPSEL__) \\r |
| 67 | || defined(__MIPSEL) \\r |
| 68 | || defined(_MIPSEL) \\r |
| 69 | || defined(__BFIN__) \\r |
| 70 | || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))\r |
| 71 | #define MY_CPU_LE\r |
| 72 | #endif\r |
| 73 | \r |
| 74 | #if defined(__BIG_ENDIAN__) \\r |
| 75 | || defined(__ARMEB__) \\r |
| 76 | || defined(__THUMBEB__) \\r |
| 77 | || defined(__AARCH64EB__) \\r |
| 78 | || defined(__MIPSEB__) \\r |
| 79 | || defined(__MIPSEB) \\r |
| 80 | || defined(_MIPSEB) \\r |
| 81 | || defined(__m68k__) \\r |
| 82 | || defined(__s390__) \\r |
| 83 | || defined(__s390x__) \\r |
| 84 | || defined(__zarch__) \\r |
| 85 | || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))\r |
| 86 | #define MY_CPU_BE\r |
| 87 | #endif\r |
| 88 | \r |
| 89 | #if defined(MY_CPU_LE) && defined(MY_CPU_BE)\r |
| 90 | Stop_Compiling_Bad_Endian\r |
| 91 | #endif\r |
| 92 | \r |
| 93 | \r |
| 94 | #ifdef MY_CPU_LE\r |
| 95 | #if defined(MY_CPU_X86_OR_AMD64) \\r |
| 96 | /* || defined(__AARCH64EL__) */\r |
| 97 | #define MY_CPU_LE_UNALIGN\r |
| 98 | #endif\r |
| 99 | #endif\r |
| 100 | \r |
| 101 | \r |
| 102 | #ifdef MY_CPU_LE_UNALIGN\r |
| 103 | \r |
| 104 | #define GetUi16(p) (*(const UInt16 *)(const void *)(p))\r |
| 105 | #define GetUi32(p) (*(const UInt32 *)(const void *)(p))\r |
| 106 | #define GetUi64(p) (*(const UInt64 *)(const void *)(p))\r |
| 107 | \r |
| 108 | #define SetUi16(p, v) { *(UInt16 *)(p) = (v); }\r |
| 109 | #define SetUi32(p, v) { *(UInt32 *)(p) = (v); }\r |
| 110 | #define SetUi64(p, v) { *(UInt64 *)(p) = (v); }\r |
| 111 | \r |
| 112 | #else\r |
| 113 | \r |
| 114 | #define GetUi16(p) ( (UInt16) ( \\r |
| 115 | ((const Byte *)(p))[0] | \\r |
| 116 | ((UInt16)((const Byte *)(p))[1] << 8) ))\r |
| 117 | \r |
| 118 | #define GetUi32(p) ( \\r |
| 119 | ((const Byte *)(p))[0] | \\r |
| 120 | ((UInt32)((const Byte *)(p))[1] << 8) | \\r |
| 121 | ((UInt32)((const Byte *)(p))[2] << 16) | \\r |
| 122 | ((UInt32)((const Byte *)(p))[3] << 24))\r |
| 123 | \r |
| 124 | #define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))\r |
| 125 | \r |
| 126 | #define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r |
| 127 | _ppp_[0] = (Byte)_vvv_; \\r |
| 128 | _ppp_[1] = (Byte)(_vvv_ >> 8); }\r |
| 129 | \r |
| 130 | #define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r |
| 131 | _ppp_[0] = (Byte)_vvv_; \\r |
| 132 | _ppp_[1] = (Byte)(_vvv_ >> 8); \\r |
| 133 | _ppp_[2] = (Byte)(_vvv_ >> 16); \\r |
| 134 | _ppp_[3] = (Byte)(_vvv_ >> 24); }\r |
| 135 | \r |
| 136 | #define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v); \\r |
| 137 | SetUi32(_ppp2_ , (UInt32)_vvv2_); \\r |
| 138 | SetUi32(_ppp2_ + 4, (UInt32)(_vvv2_ >> 32)); }\r |
| 139 | \r |
| 140 | #endif\r |
| 141 | \r |
| 142 | \r |
| 143 | #if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)\r |
| 144 | \r |
| 145 | /* Note: we use bswap instruction, that is unsupported in 386 cpu */\r |
| 146 | \r |
| 147 | #include <stdlib.h>\r |
| 148 | \r |
| 149 | #pragma intrinsic(_byteswap_ulong)\r |
| 150 | #pragma intrinsic(_byteswap_uint64)\r |
| 151 | #define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))\r |
| 152 | #define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))\r |
| 153 | \r |
| 154 | #define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)\r |
| 155 | \r |
| 156 | #elif defined(MY_CPU_LE_UNALIGN) && defined (__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))\r |
| 157 | \r |
| 158 | #define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))\r |
| 159 | #define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))\r |
| 160 | \r |
| 161 | #define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)\r |
| 162 | \r |
| 163 | #else\r |
| 164 | \r |
| 165 | #define GetBe32(p) ( \\r |
| 166 | ((UInt32)((const Byte *)(p))[0] << 24) | \\r |
| 167 | ((UInt32)((const Byte *)(p))[1] << 16) | \\r |
| 168 | ((UInt32)((const Byte *)(p))[2] << 8) | \\r |
| 169 | ((const Byte *)(p))[3] )\r |
| 170 | \r |
| 171 | #define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))\r |
| 172 | \r |
| 173 | #define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r |
| 174 | _ppp_[0] = (Byte)(_vvv_ >> 24); \\r |
| 175 | _ppp_[1] = (Byte)(_vvv_ >> 16); \\r |
| 176 | _ppp_[2] = (Byte)(_vvv_ >> 8); \\r |
| 177 | _ppp_[3] = (Byte)_vvv_; }\r |
| 178 | \r |
| 179 | #endif\r |
| 180 | \r |
| 181 | \r |
| 182 | #define GetBe16(p) ( (UInt16) ( \\r |
| 183 | ((UInt16)((const Byte *)(p))[0] << 8) | \\r |
| 184 | ((const Byte *)(p))[1] ))\r |
| 185 | \r |
| 186 | \r |
| 187 | \r |
| 188 | #ifdef MY_CPU_X86_OR_AMD64\r |
| 189 | \r |
| 190 | typedef struct\r |
| 191 | {\r |
| 192 | UInt32 maxFunc;\r |
| 193 | UInt32 vendor[3];\r |
| 194 | UInt32 ver;\r |
| 195 | UInt32 b;\r |
| 196 | UInt32 c;\r |
| 197 | UInt32 d;\r |
| 198 | } Cx86cpuid;\r |
| 199 | \r |
| 200 | enum\r |
| 201 | {\r |
| 202 | CPU_FIRM_INTEL,\r |
| 203 | CPU_FIRM_AMD,\r |
| 204 | CPU_FIRM_VIA\r |
| 205 | };\r |
| 206 | \r |
| 207 | void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);\r |
| 208 | \r |
| 209 | Bool x86cpuid_CheckAndRead(Cx86cpuid *p);\r |
| 210 | int x86cpuid_GetFirm(const Cx86cpuid *p);\r |
| 211 | \r |
| 212 | #define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))\r |
| 213 | #define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))\r |
| 214 | #define x86cpuid_GetStepping(ver) (ver & 0xF)\r |
| 215 | \r |
| 216 | Bool CPU_Is_InOrder();\r |
| 217 | Bool CPU_Is_Aes_Supported();\r |
| 218 | \r |
| 219 | #endif\r |
| 220 | \r |
| 221 | EXTERN_C_END\r |
| 222 | \r |
| 223 | #endif\r |