/* ===-------- Intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <Intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))
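/* __always_inline__ collapses these wrappers into their underlying
 * instruction sequences even at -O0; __nodebug__ keeps the wrappers
 * themselves out of the debug info. */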
#ifdef __cplusplus
extern "C" {
#endif
#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
__m64 _m_from_int(int _l);
void _m_prefetch(void *);
float _m_to_float(__m64);
int _m_to_int(__m64 _M);
#endif
/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
static __inline__
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__
void _WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xrstor(void const *, unsigned __int64);
void __cdecl _xsave(void *, unsigned __int64);
void __cdecl _xsaveopt(void *, unsigned __int64);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
static __inline__
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmpex(jmp_buf);
#endif
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
/*
 * Multiply two 64-bit integers and obtain a 128-bit product.
 * The low half is returned directly and the high half through an out
 * parameter.
 */
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_umul128(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand,
         unsigned __int64 *_HighProduct) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  *_HighProduct = _FullProduct >> 64;
  return _FullProduct;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__umulh(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  return _FullProduct >> 64;
}
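/*
 * Example (illustrative only): splitting the 128-bit product of two 64-bit
 * operands into its halves.
 *
 *   unsigned __int64 _Hi;
 *   unsigned __int64 _Lo = _umul128(0x8000000000000000ULL, 4, &_Hi);
 *   // _Lo == 0, _Hi == 2: the full product is 2**65.
 */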
void __cdecl _xrstor64(void const *, unsigned __int64);
void __cdecl _xsave64(void *, unsigned __int64);
void __cdecl _xsaveopt64(void *, unsigned __int64);
#endif /* __x86_64__ */
/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
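/* Each rotate masks its shift count to the operand width and then
 * special-cases a masked count of zero: without that check, a rotate by zero
 * would evaluate _Value >> (width - 0), and shifting by the full bit width
 * of the type is undefined behavior in C. */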
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
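/*
 * As with the MSVC intrinsics, the scan helpers return 0 and leave *_Index
 * unchanged when _Mask is zero, so check the return value before using the
 * index (illustrative only):
 *
 *   unsigned long _Idx;
 *   if (_BitScanForward(&_Idx, 0x48))
 *     ; // _Idx == 3, the position of the lowest set bit of 0x48.
 */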
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__popcnt16(unsigned short _Value) {
  return __builtin_popcount((int)_Value);
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__popcnt(unsigned int _Value) {
  return __builtin_popcount(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
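/* The interlocked form performs the whole read-modify-write as one atomic
 * fetch-or (the equivalent of a lock bts instruction) and returns the
 * previous value of the selected bit. */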
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__popcnt64(unsigned __int64 _Value) {
  return __builtin_popcountll(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#endif
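/*
 * Note the fetch-then-add order: the ExchangeAdd family returns the value
 * the addend held before the addition (illustrative only):
 *
 *   short _Counter = 5;
 *   short _Old = _InterlockedExchangeAdd16(&_Counter, 3);
 *   // _Old == 5, _Counter == 8.
 */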
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
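/* Unlike the ExchangeAdd family, Increment and Decrement return the value
 * that results from the operation, hence the add_fetch/sub_fetch builtins
 * above. */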
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
/* MSVC's And/Or/Xor intrinsics return the value the destination held before
 * the operation, so these map to the fetch-then-op builtins rather than the
 * op-then-fetch forms. */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
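/*
 * The compare-exchange wrappers return the value the destination held before
 * the call, so success is detected by comparing that against the comparand.
 * A sketch of a typical update loop (illustrative only; _Flags is a
 * hypothetical short volatile *):
 *
 *   short _Old, _New;
 *   do {
 *     _Old = *_Flags;
 *     _New = _Old | 0x1;
 *   } while (_InterlockedCompareExchange16(_Flags, _New, _Old) != _Old);
 */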
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
#endif
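/* These expand to an empty asm statement with a "memory" clobber: a
 * compiler-only fence that keeps the compiler from moving memory accesses
 * across it but emits no instruction, so it imposes no ordering on other
 * processors. */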
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__faststorefence(void) {
  __asm__ volatile("lock orq $0, (%%rsp)" : : : "memory");
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))
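/* For example (illustrative only), __ptr_to_addr_space(257, unsigned long,
 * 0x18) expands to a volatile unsigned long pointer in address space 257,
 * which the backend lowers to an FS-relative access of the dword at offset
 * 0x18. */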
#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readfsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
#endif
#undef __ptr_to_addr_space
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
  __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif
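/*
 * These map directly onto the rep-prefixed x86 string instructions, so
 * __stosb behaves like an inlined memset (illustrative only):
 *
 *   unsigned char _Buf[16];
 *   __stosb(_Buf, 0xCC, sizeof(_Buf));  // fill _Buf with 0xCC
 */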
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
          : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
          : "%edi", "%ecx");
}
#endif
/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __DEFAULT_FN_ATTRS
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __DEFAULT_FN_ATTRS
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
           : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
           : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
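/*
 * Example (illustrative only): checking that the OS has enabled both XMM and
 * YMM state in XCR0, a precondition for safely executing AVX code.
 *
 *   unsigned __int64 _Xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
 *   int _AvxEnabled = (_Xcr0 & 0x6) == 0x6;
 */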
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
#endif
/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */