Utils.hpp 19 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780
  1. /*
  2. * Copyright (c)2013-2020 ZeroTier, Inc.
  3. *
  4. * Use of this software is governed by the Business Source License included
  5. * in the LICENSE.TXT file in the project's root directory.
  6. *
  7. * Change Date: 2025-01-01
  8. *
  9. * On the date above, in accordance with the Business Source License, use
  10. * of this software will be governed by version 2.0 of the Apache License.
  11. */
  12. /****/
  13. #ifndef ZT_UTILS_HPP
  14. #define ZT_UTILS_HPP
#include "Constants.hpp"

#include <stdarg.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#ifndef __WINDOWS__
#include <sys/mman.h>
#endif

#include <algorithm>
#include <memory>
#include <stdexcept>
#include <utility>
  22. namespace ZeroTier {
  23. namespace Utils {
  24. #ifndef __WINDOWS__
  25. #include <sys/mman.h>
  26. #endif
  27. // Macros to convert endian-ness at compile time for constants.
  28. #if __BYTE_ORDER == __LITTLE_ENDIAN
  29. #define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)((uint16_t)((uint16_t)(x) << 8U) | (uint16_t)((uint16_t)(x) >> 8U)))
  30. #define ZT_CONST_TO_BE_UINT64(x) ( \
  31. (((uint64_t)(x) & 0x00000000000000ffULL) << 56U) | \
  32. (((uint64_t)(x) & 0x000000000000ff00ULL) << 40U) | \
  33. (((uint64_t)(x) & 0x0000000000ff0000ULL) << 24U) | \
  34. (((uint64_t)(x) & 0x00000000ff000000ULL) << 8U) | \
  35. (((uint64_t)(x) & 0x000000ff00000000ULL) >> 8U) | \
  36. (((uint64_t)(x) & 0x0000ff0000000000ULL) >> 24U) | \
  37. (((uint64_t)(x) & 0x00ff000000000000ULL) >> 40U) | \
  38. (((uint64_t)(x) & 0xff00000000000000ULL) >> 56U))
  39. #else
  40. #define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)(x))
  41. #define ZT_CONST_TO_BE_UINT64(x) ((uint64_t)(x))
  42. #endif
  43. #define ZT_ROR64(x, r) (((x) >> (r)) | ((x) << (64 - (r))))
  44. #define ZT_ROL64(x, r) (((x) << (r)) | ((x) >> (64 - (r))))
  45. #define ZT_ROR32(x, r) (((x) >> (r)) | ((x) << (32 - (r))))
  46. #define ZT_ROL32(x, r) (((x) << (r)) | ((x) >> (32 - (r))))
  47. #ifdef ZT_ARCH_ARM_HAS_NEON
/**
 * ARM CPU feature flags detected at runtime.
 *
 * Filled in by the constructor (defined out-of-line); see the ARMCAP
 * extern below for the process-wide instance. Field names presumably
 * mirror the corresponding ARM crypto/CRC capability bits -- confirm
 * against the probing code in the implementation file.
 */
struct ARMCapabilities
{
	ARMCapabilities() noexcept;

	bool aes;
	bool crc32;
	bool pmull;
	bool sha1;
	bool sha2;
};
  57. extern const ARMCapabilities ARMCAP;
  58. #endif
  59. #ifdef ZT_ARCH_X64
/**
 * x86-64 CPU feature flags detected at runtime.
 *
 * Filled in by the constructor (defined out-of-line); see the CPUID
 * extern below for the process-wide instance.
 */
struct CPUIDRegisters
{
	CPUIDRegisters() noexcept;

	bool rdrand;
	bool aes;
	bool avx;
	bool vaes; // implies AVX
	bool vpclmulqdq; // implies AVX
	bool avx2;
	bool avx512f;
	bool sha;
	bool fsrm;
};
  73. extern const CPUIDRegisters CPUID;
  74. #endif
  75. extern const std::bad_alloc BadAllocException;
  76. extern const std::out_of_range OutOfRangeException;
  77. /**
  78. * 256 zero bits / 32 zero bytes
  79. */
  80. extern const uint64_t ZERO256[4];
  81. /**
  82. * Hexadecimal characters 0-f
  83. */
  84. extern const char HEXCHARS[16];
  85. /**
  86. * A random integer generated at startup for Map's hash bucket calculation.
  87. */
  88. extern const uint64_t s_mapNonce;
  89. /**
  90. * Lock memory to prevent swapping out to secondary storage (if possible)
  91. *
  92. * This is used to attempt to prevent the swapping out of long-term stored secure
  93. * credentials like secret keys. It isn't supported on all platforms and may not
  94. * be absolutely guaranteed to work, but it's a countermeasure.
  95. *
  96. * @param p Memory to lock
  97. * @param l Size of memory
  98. */
static ZT_INLINE void memoryLock(const void *const p, const unsigned int l) noexcept
{
	// Best effort: return values are deliberately ignored. Locking may fail
	// (e.g. RLIMIT_MEMLOCK, unsupported platform) and that is acceptable here.
#ifdef __WINDOWS__
	// Win32 equivalent of mlock(); const_cast because the API takes LPVOID.
	VirtualLock(reinterpret_cast<LPVOID>(const_cast<void*>(p)), l);
#else
	mlock(p, l);
#endif
}
  107. /**
  108. * Unlock memory locked with memoryLock()
  109. *
  110. * @param p Memory to unlock
  111. * @param l Size of memory
  112. */
static ZT_INLINE void memoryUnlock(const void *const p, const unsigned int l) noexcept
{
	// Best effort, mirroring memoryLock(); return values are ignored.
#ifdef __WINDOWS__
	VirtualUnlock(reinterpret_cast<LPVOID>(const_cast<void*>(p)), l);
#else
	munlock(p, l);
#endif
}
  121. /**
  122. * Perform a time-invariant binary comparison
  123. *
  124. * @param a First binary string
  125. * @param b Second binary string
  126. * @param len Length of strings
  127. * @return True if strings are equal
  128. */
  129. bool secureEq(const void *a, const void *b, unsigned int len) noexcept;
  130. /**
  131. * Be absolutely sure to zero memory
  132. *
  133. * This uses a few tricks to make sure the compiler doesn't optimize it
  134. * out, including passing the memory as volatile.
  135. *
  136. * @param ptr Memory to zero
  137. * @param len Length of memory in bytes
  138. */
  139. void burn(volatile void *ptr, unsigned int len);
  140. /**
  141. * @param n Number to convert
  142. * @param s Buffer, at least 24 bytes in size
  143. * @return String containing 'n' in base 10 form
  144. */
  145. char *decimal(unsigned long n, char s[24]) noexcept;
  146. /**
  147. * Convert an unsigned integer into hex
  148. *
  149. * @param i Any unsigned integer
* @param buf Buffer to receive hex, must be at least (2*sizeof(i))+1 in size or overflow will occur.
* @return Pointer to buf containing hex string with trailing zero byte
  152. */
  153. char *hex(uint64_t i, char buf[17]) noexcept;
  154. /**
  155. * Decode an unsigned integer in hex format
  156. *
  157. * @param s String to decode, non-hex chars are ignored
  158. * @return Unsigned integer
  159. */
  160. uint64_t unhex(const char *s) noexcept;
  161. /**
  162. * Convert a byte array into hex
  163. *
  164. * @param d Bytes
  165. * @param l Length of bytes
  166. * @param s String buffer, must be at least (l*2)+1 in size or overflow will occur
  167. * @return Pointer to filled string buffer
  168. */
  169. char *hex(const void *d, unsigned int l, char *s) noexcept;
  170. /**
  171. * Decode a hex string
  172. *
  173. * @param h Hex C-string (non hex chars are ignored)
  174. * @param hlen Maximum length of string (will stop at terminating zero)
  175. * @param buf Output buffer
  176. * @param buflen Length of output buffer
  177. * @return Number of written bytes
  178. */
  179. unsigned int unhex(const char *h, unsigned int hlen, void *buf, unsigned int buflen) noexcept;
  180. /**
  181. * Generate secure random bytes
  182. *
  183. * This will try to use whatever OS sources of entropy are available. It's
  184. * guarded by an internal mutex so it's thread-safe.
  185. *
  186. * @param buf Buffer to fill
  187. * @param bytes Number of random bytes to generate
  188. */
  189. void getSecureRandom(void *buf, unsigned int bytes) noexcept;
  190. /**
  191. * @return Secure random 64-bit integer
  192. */
  193. uint64_t getSecureRandomU64() noexcept;
  194. /**
  195. * Encode string to base32
  196. *
  197. * @param data Binary data to encode
  198. * @param length Length of data in bytes
  199. * @param result Result buffer
  200. * @param bufSize Size of result buffer
  201. * @return Number of bytes written
  202. */
  203. int b32e(const uint8_t *data, int length, char *result, int bufSize) noexcept;
  204. /**
  205. * Decode base32 string
  206. *
  207. * @param encoded C-string in base32 format (non-base32 characters are ignored)
  208. * @param result Result buffer
  209. * @param bufSize Size of result buffer
  210. * @return Number of bytes written or -1 on error
  211. */
  212. int b32d(const char *encoded, uint8_t *result, int bufSize) noexcept;
  213. /**
  214. * Get a non-cryptographic random integer.
  215. *
  216. * This should never be used for cryptographic use cases, not even for choosing
  217. * message nonce/IV values if they should not repeat. It should only be used when
  218. * a fast and potentially "dirty" random source is needed.
  219. */
  220. uint64_t random() noexcept;
  221. /**
  222. * Perform a safe C string copy, ALWAYS null-terminating the result
  223. *
  224. * This will never ever EVER result in dest[] not being null-terminated
  225. * regardless of any input parameter (other than len==0 which is invalid).
  226. *
  227. * @param dest Destination buffer (must not be NULL)
  228. * @param len Length of dest[] (if zero, false is returned and nothing happens)
  229. * @param src Source string (if NULL, dest will receive a zero-length string and true is returned)
  230. * @return True on success, false on overflow (buffer will still be 0-terminated)
  231. */
  232. bool scopy(char *dest, unsigned int len, const char *src) noexcept;
  233. /**
  234. * Check if a buffer's contents are all zero
  235. */
  236. static ZT_INLINE bool allZero(const void *const b, unsigned int l) noexcept
  237. {
  238. const uint8_t *p = reinterpret_cast<const uint8_t *>(b);
  239. #ifndef ZT_NO_UNALIGNED_ACCESS
  240. while (l >= 8) {
  241. if (*reinterpret_cast<const uint64_t *>(p) != 0)
  242. return false;
  243. p += 8;
  244. l -= 8;
  245. }
  246. #endif
  247. for (unsigned int i = 0; i < l; ++i) {
  248. if (reinterpret_cast<const uint8_t *>(p)[i] != 0)
  249. return false;
  250. }
  251. return true;
  252. }
  253. /**
  254. * Wrapper around reentrant strtok functions, which differ in name by platform
  255. *
  256. * @param str String to tokenize or NULL for subsequent calls
  257. * @param delim Delimiter
  258. * @param saveptr Pointer to pointer where function can save state
  259. * @return Next token or NULL if none
  260. */
static ZT_INLINE char *stok(char *str, const char *delim, char **saveptr) noexcept
{
#ifdef __WINDOWS__
	// MSVC's strtok_s has the same signature/semantics as POSIX strtok_r.
	return strtok_s(str,delim,saveptr);
#else
	return strtok_r(str, delim, saveptr);
#endif
}
  269. static ZT_INLINE unsigned int strToUInt(const char *s) noexcept
  270. { return (unsigned int)strtoul(s, nullptr, 10); }
  271. static ZT_INLINE unsigned long long hexStrToU64(const char *s) noexcept
  272. {
  273. #ifdef __WINDOWS__
  274. return (unsigned long long)_strtoui64(s,nullptr,16);
  275. #else
  276. return strtoull(s, nullptr, 16);
  277. #endif
  278. }
#ifdef __GNUC__
// GCC/Clang: popcount builtins, which compile down to a single instruction
// where the target supports one.
static ZT_INLINE unsigned int countBits(const uint8_t v) noexcept
{ return (unsigned int)__builtin_popcount((unsigned int)v); }

static ZT_INLINE unsigned int countBits(const uint16_t v) noexcept
{ return (unsigned int)__builtin_popcount((unsigned int)v); }

static ZT_INLINE unsigned int countBits(const uint32_t v) noexcept
{ return (unsigned int)__builtin_popcountl((unsigned long)v); }

static ZT_INLINE unsigned int countBits(const uint64_t v) noexcept
{ return (unsigned int)__builtin_popcountll((unsigned long long)v); }
#else
/**
 * Count set bits (population count) in any unsigned integer type.
 *
 * Portable SWAR ("SIMD within a register") popcount from the classic
 * bit-twiddling hacks collection.
 */
template<typename T>
static ZT_INLINE unsigned int countBits(T v) noexcept
{
	v = v - ((v >> 1) & (T)~(T)0/3);                      // 2-bit pair counts
	v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3); // 4-bit nibble counts
	v = (v + (v >> 4)) & (T)~(T)0/255*15;                 // per-byte counts
	// Multiply by the all-0x01 constant to sum the byte counts into the top byte.
	return (unsigned int)((v * ((~((T)0))/((T)255))) >> ((sizeof(T) - 1) * 8));
}
#endif
  298. /**
  299. * Unconditionally swap bytes regardless of host byte order
  300. *
  301. * @param n Integer to swap
  302. * @return Integer with bytes reversed
  303. */
static ZT_INLINE uint64_t swapBytes(const uint64_t n) noexcept
{
#ifdef __GNUC__
	// GCC/Clang: single bswap instruction where available.
	return __builtin_bswap64(n);
#else
#ifdef _MSC_VER
	return (uint64_t)_byteswap_uint64((unsigned __int64)n);
#else
	// Generic fallback: assemble the reversed value with masks and shifts.
	return (
		((n & 0x00000000000000ffULL) << 56) |
		((n & 0x000000000000ff00ULL) << 40) |
		((n & 0x0000000000ff0000ULL) << 24) |
		((n & 0x00000000ff000000ULL) << 8) |
		((n & 0x000000ff00000000ULL) >> 8) |
		((n & 0x0000ff0000000000ULL) >> 24) |
		((n & 0x00ff000000000000ULL) >> 40) |
		((n & 0xff00000000000000ULL) >> 56)
	);
#endif
#endif
}
  325. /**
  326. * Unconditionally swap bytes regardless of host byte order
  327. *
  328. * @param n Integer to swap
  329. * @return Integer with bytes reversed
  330. */
  331. static ZT_INLINE uint32_t swapBytes(const uint32_t n) noexcept
  332. {
  333. #if defined(__GNUC__)
  334. return __builtin_bswap32(n);
  335. #else
  336. #ifdef _MSC_VER
  337. return (uint32_t)_byteswap_ulong((unsigned long)n);
  338. #else
  339. return htonl(n);
  340. #endif
  341. #endif
  342. }
  343. /**
  344. * Unconditionally swap bytes regardless of host byte order
  345. *
  346. * @param n Integer to swap
  347. * @return Integer with bytes reversed
  348. */
  349. static ZT_INLINE uint16_t swapBytes(const uint16_t n) noexcept
  350. {
  351. #if defined(__GNUC__)
  352. return __builtin_bswap16(n);
  353. #else
  354. #ifdef _MSC_VER
  355. return (uint16_t)_byteswap_ushort((unsigned short)n);
  356. #else
  357. return htons(n);
  358. #endif
  359. #endif
  360. }
// Size-dispatched byte-swap adapter: selects the correct swapBytes() overload
// for ANY integer type I (signed/unsigned, typedef'd variants) based on
// sizeof(I). Instantiate as _swap_bytes_bysize<I, sizeof(I)>::s(n).
template< typename I, unsigned int S >
class _swap_bytes_bysize;

// 1 byte: nothing to swap.
template< typename I >
class _swap_bytes_bysize< I, 1 >
{
public:
	static ZT_INLINE I s(const I n) noexcept
	{ return n; }
};

template< typename I >
class _swap_bytes_bysize< I, 2 >
{
public:
	static ZT_INLINE I s(const I n) noexcept
	{ return (I)swapBytes((uint16_t)n); }
};

template< typename I >
class _swap_bytes_bysize< I, 4 >
{
public:
	static ZT_INLINE I s(const I n) noexcept
	{ return (I)swapBytes((uint32_t)n); }
};

template< typename I >
class _swap_bytes_bysize< I, 8 >
{
public:
	static ZT_INLINE I s(const I n) noexcept
	{ return (I)swapBytes((uint64_t)n); }
};
// Size-dispatched big-endian load: assembles an integer of type I from
// individual bytes, so no unaligned memory access is ever performed (used
// on strict-alignment targets; see loadBigEndian()).
template< typename I, unsigned int S >
class _load_be_bysize;

template< typename I >
class _load_be_bysize< I, 1 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return p[0]; }
};

template< typename I >
class _load_be_bysize< I, 2 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)(((unsigned int)p[0] << 8U) | (unsigned int)p[1]); }
};

template< typename I >
class _load_be_bysize< I, 4 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)(((uint32_t)p[0] << 24U) | ((uint32_t)p[1] << 16U) | ((uint32_t)p[2] << 8U) | (uint32_t)p[3]); }
};

template< typename I >
class _load_be_bysize< I, 8 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)(((uint64_t)p[0] << 56U) | ((uint64_t)p[1] << 48U) | ((uint64_t)p[2] << 40U) | ((uint64_t)p[3] << 32U) | ((uint64_t)p[4] << 24U) | ((uint64_t)p[5] << 16U) | ((uint64_t)p[6] << 8U) | (uint64_t)p[7]); }
};
// Size-dispatched little-endian load: byte-wise counterpart of
// _load_be_bysize with the byte order reversed (used by loadLittleEndian()
// on big-endian or strict-alignment targets).
template< typename I, unsigned int S >
class _load_le_bysize;

template< typename I >
class _load_le_bysize< I, 1 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return p[0]; }
};

template< typename I >
class _load_le_bysize< I, 2 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)((unsigned int)p[0] | ((unsigned int)p[1] << 8U)); }
};

template< typename I >
class _load_le_bysize< I, 4 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)((uint32_t)p[0] | ((uint32_t)p[1] << 8U) | ((uint32_t)p[2] << 16U) | ((uint32_t)p[3] << 24U)); }
};

template< typename I >
class _load_le_bysize< I, 8 >
{
public:
	static ZT_INLINE I l(const uint8_t *const p) noexcept
	{ return (I)((uint64_t)p[0] | ((uint64_t)p[1] << 8U) | ((uint64_t)p[2] << 16U) | ((uint64_t)p[3] << 24U) | ((uint64_t)p[4] << 32U) | ((uint64_t)p[5] << 40U) | ((uint64_t)p[6] << 48U) | ((uint64_t)p[7]) << 56U); }
};
  453. /**
  454. * Convert any signed or unsigned integer type to big-endian ("network") byte order
  455. *
  456. * @tparam I Integer type (usually inferred)
  457. * @param n Value to convert
  458. * @return Value in big-endian order
  459. */
template< typename I >
static ZT_INLINE I hton(const I n) noexcept
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
	// Little-endian host: dispatch to the size-matched byte swap.
	return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
	return n; // big-endian host is already in network byte order
#endif
}
  469. /**
  470. * Convert any signed or unsigned integer type to host byte order from big-endian ("network") byte order
  471. *
  472. * @tparam I Integer type (usually inferred)
  473. * @param n Value to convert
  474. * @return Value in host byte order
  475. */
template< typename I >
static ZT_INLINE I ntoh(const I n) noexcept
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
	// Byte swap is its own inverse, so this is identical to hton().
	return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
	return n; // big-endian host: no conversion needed
#endif
}
  485. /**
  486. * Copy bits from memory into an integer type without modifying their order
  487. *
  488. * @tparam I Type to load
  489. * @param p Byte stream, must be at least sizeof(I) in size
  490. * @return Loaded raw integer
  491. */
  492. template< typename I >
  493. static ZT_INLINE I loadMachineEndian(const void *const p) noexcept
  494. {
  495. #ifdef ZT_NO_UNALIGNED_ACCESS
  496. I tmp;
  497. for(int i=0;i<(int)sizeof(I);++i)
  498. reinterpret_cast<uint8_t *>(&tmp)[i] = reinterpret_cast<const uint8_t *>(p)[i];
  499. return tmp;
  500. #else
  501. return *reinterpret_cast<const I *>(p);
  502. #endif
  503. }
  504. /**
  505. * Copy bits from memory into an integer type without modifying their order
  506. *
  507. * @tparam I Type to store
  508. * @param p Byte array (must be at least sizeof(I))
  509. * @param i Integer to store
  510. */
  511. template< typename I >
  512. static ZT_INLINE void storeMachineEndian(void *const p, const I i) noexcept
  513. {
  514. #ifdef ZT_NO_UNALIGNED_ACCESS
  515. for(unsigned int k=0;k<sizeof(I);++k)
  516. reinterpret_cast<uint8_t *>(p)[k] = reinterpret_cast<const uint8_t *>(&i)[k];
  517. #else
  518. *reinterpret_cast<I *>(p) = i;
  519. #endif
  520. }
  521. /**
  522. * Decode a big-endian value from a byte stream
  523. *
  524. * @tparam I Type to decode (should be unsigned e.g. uint32_t or uint64_t)
  525. * @param p Byte stream, must be at least sizeof(I) in size
  526. * @return Decoded integer
  527. */
template< typename I >
static ZT_INLINE I loadBigEndian(const void *const p) noexcept
{
#ifdef ZT_NO_UNALIGNED_ACCESS
	// Byte-wise assembly for targets that fault on unaligned loads.
	return _load_be_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
	// Direct (possibly unaligned) load, then swap on little-endian hosts.
	return ntoh(*reinterpret_cast<const I *>(p));
#endif
}
  537. /**
  538. * Save an integer in big-endian format
  539. *
  540. * @tparam I Integer type to store (usually inferred)
  541. * @param p Byte stream to write (must be at least sizeof(I))
* @param i Integer to write
  543. */
template< typename I >
static ZT_INLINE void storeBigEndian(void *const p, I i) noexcept
{
#ifdef ZT_NO_UNALIGNED_ACCESS
	// Byte-wise store for targets that fault on unaligned access.
	storeMachineEndian(p,hton(i));
#else
	// Direct (possibly unaligned) store of the byte-swapped value.
	*reinterpret_cast<I *>(p) = hton(i);
#endif
}
  553. /**
  554. * Decode a little-endian value from a byte stream
  555. *
  556. * @tparam I Type to decode
  557. * @param p Byte stream, must be at least sizeof(I) in size
  558. * @return Decoded integer
  559. */
template< typename I >
static ZT_INLINE I loadLittleEndian(const void *const p) noexcept
{
#if __BYTE_ORDER == __BIG_ENDIAN || defined(ZT_NO_UNALIGNED_ACCESS)
	// Byte-wise assembly: needed when the host is big-endian (must swap) or
	// when unaligned direct loads are not permitted.
	return _load_le_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
	// Little-endian host with unaligned access: a raw load is already correct.
	return *reinterpret_cast<const I *>(p);
#endif
}
  569. /**
  570. * Save an integer in little-endian format
  571. *
  572. * @tparam I Integer type to store (usually inferred)
  573. * @param p Byte stream to write (must be at least sizeof(I))
* @param i Integer to write
  575. */
template< typename I >
static ZT_INLINE void storeLittleEndian(void *const p, const I i) noexcept
{
#if __BYTE_ORDER == __BIG_ENDIAN
	// Big-endian host: swap to little-endian, then store byte-wise.
	storeMachineEndian(p,_swap_bytes_bysize<I,sizeof(I)>::s(i));
#else
#ifdef ZT_NO_UNALIGNED_ACCESS
	// Little-endian host but no unaligned stores: byte-wise copy.
	storeMachineEndian(p,i);
#else
	// Little-endian host with unaligned access: a raw store is already correct.
	*reinterpret_cast<I *>(p) = i;
#endif
#endif
}
  589. /**
  590. * Copy memory block whose size is known at compile time.
  591. *
  592. * @tparam L Size of memory
  593. * @param dest Destination memory
  594. * @param src Source memory
  595. */
template< unsigned long L >
static ZT_INLINE void copy(void *dest, const void *src) noexcept
{
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
	// "cld" forces a forward copy, then "rep movsb" copies RCX bytes from
	// RSI to RDI. The count/src/dest registers are consumed in place via the
	// "+c"/"+S"/"+D" constraints; presumably this targets fast rep-movsb on
	// modern x64 (note the fsrm CPUID flag above) -- confirm with benchmarks.
	uintptr_t l = L;
	__asm__ __volatile__ ("cld ; rep movsb" : "+c"(l), "+S"(src), "+D"(dest) :: "memory");
#else
	memcpy(dest, src, L);
#endif
}
  606. /**
  607. * Copy memory block whose size is known at run time
  608. *
  609. * @param dest Destination memory
  610. * @param src Source memory
  611. * @param len Bytes to copy
  612. */
static ZT_INLINE void copy(void *dest, const void *src, unsigned long len) noexcept
{
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
	// Same rep-movsb copy as the compile-time-size overload; len/src/dest
	// are consumed in place via the "+c"/"+S"/"+D" constraints.
	__asm__ __volatile__ ("cld ; rep movsb" : "+c"(len), "+S"(src), "+D"(dest) :: "memory");
#else
	memcpy(dest, src, len);
#endif
}
  621. /**
  622. * Zero memory block whose size is known at compile time
  623. *
  624. * @tparam L Size in bytes
  625. * @param dest Memory to zero
  626. */
template< unsigned long L >
static ZT_INLINE void zero(void *dest) noexcept
{
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
	// "rep stosb" writes AL (set to 0 via the "a"(0) input) to RCX bytes at
	// RDI; "cld" forces forward direction. Count and dest are consumed in
	// place via the "+c"/"+D" constraints.
	uintptr_t l = L;
	__asm__ __volatile__ ("cld ; rep stosb" :"+c" (l), "+D" (dest) : "a" (0) : "memory");
#else
	memset(dest, 0, L);
#endif
}
  637. /**
  638. * Zero memory block whose size is known at run time
  639. *
  640. * @param dest Memory to zero
  641. * @param len Size in bytes
  642. */
static ZT_INLINE void zero(void *dest, unsigned long len) noexcept
{
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
	// Same rep-stosb fill as the compile-time-size overload; AL=0 via "a"(0).
	__asm__ __volatile__ ("cld ; rep stosb" :"+c" (len), "+D" (dest) : "a" (0) : "memory");
#else
	memset(dest, 0, len);
#endif
}
  651. /**
  652. * Compute 32-bit FNV-1a checksum
  653. *
  654. * See: http://www.isthe.com/chongo/tech/comp/fnv/
  655. *
  656. * @param data Data to checksum
  657. * @param len Length of data
  658. * @return FNV1a checksum
  659. */
  660. uint32_t fnv1a32(const void *data, unsigned int len) noexcept;
  661. /**
  662. * Mix bits in a 64-bit integer (non-cryptographic, for hash tables)
  663. *
  664. * https://nullprogram.com/blog/2018/07/31/
  665. *
  666. * @param x Integer to mix
  667. * @return Hashed value
  668. */
  669. static ZT_INLINE uint64_t hash64(uint64_t x) noexcept
  670. {
  671. x ^= x >> 30U;
  672. x *= 0xbf58476d1ce4e5b9ULL;
  673. x ^= x >> 27U;
  674. x *= 0x94d049bb133111ebULL;
  675. x ^= x >> 31U;
  676. return x;
  677. }
  678. /**
  679. * Mix bits in a 32-bit integer (non-cryptographic, for hash tables)
  680. *
  681. * https://nullprogram.com/blog/2018/07/31/
  682. *
  683. * @param x Integer to mix
  684. * @return Hashed value
  685. */
  686. static ZT_INLINE uint32_t hash32(uint32_t x) noexcept
  687. {
  688. x ^= x >> 16U;
  689. x *= 0x7feb352dU;
  690. x ^= x >> 15U;
  691. x *= 0x846ca68bU;
  692. x ^= x >> 16U;
  693. return x;
  694. }
  695. } // namespace Utils
  696. } // namespace ZeroTier
  697. #endif