Utils.hpp 23 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901
  1. /*
  2. * Copyright (c)2013-2020 ZeroTier, Inc.
  3. *
  4. * Use of this software is governed by the Business Source License included
  5. * in the LICENSE.TXT file in the project's root directory.
  6. *
  7. * Change Date: 2024-01-01
  8. *
  9. * On the date above, in accordance with the Business Source License, use
  10. * of this software will be governed by version 2.0 of the Apache License.
  11. */
  12. /****/
  13. #ifndef ZT_UTILS_HPP
  14. #define ZT_UTILS_HPP
#include "Constants.hpp"

#ifndef __WINDOWS__
#include <sys/mman.h>
#endif

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <limits>
#include <memory>
#include <new>
#include <utility>
  19. namespace ZeroTier {
  20. namespace Utils {
  21. #ifndef __WINDOWS__
  22. #include <sys/mman.h>
  23. #endif
// Macros to convert endian-ness at compile time for constants.
#if __BYTE_ORDER == __LITTLE_ENDIAN
// Swap a 16-bit constant into big-endian (network) byte order.
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)((uint16_t)((uint16_t)(x) << 8U) | (uint16_t)((uint16_t)(x) >> 8U)))
// Swap a 64-bit constant into big-endian (network) byte order.
#define ZT_CONST_TO_BE_UINT64(x) ( \
(((uint64_t)(x) & 0x00000000000000ffULL) << 56U) | \
(((uint64_t)(x) & 0x000000000000ff00ULL) << 40U) | \
(((uint64_t)(x) & 0x0000000000ff0000ULL) << 24U) | \
(((uint64_t)(x) & 0x00000000ff000000ULL) << 8U) | \
(((uint64_t)(x) & 0x000000ff00000000ULL) >> 8U) | \
(((uint64_t)(x) & 0x0000ff0000000000ULL) >> 24U) | \
(((uint64_t)(x) & 0x00ff000000000000ULL) >> 40U) | \
(((uint64_t)(x) & 0xff00000000000000ULL) >> 56U))
#else
// Big-endian host: constants are already in network byte order.
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)(x))
#define ZT_CONST_TO_BE_UINT64(x) ((uint64_t)(x))
#endif
// Bitwise rotate right/left by r bits.
// NOTE(review): callers must keep 0 < r < word width; r == 0 would shift by
// the full width, which is undefined behavior in C++.
#define ZT_ROR64(x, r) (((x) >> (r)) | ((x) << (64 - (r))))
#define ZT_ROL64(x, r) (((x) << (r)) | ((x) >> (64 - (r))))
#define ZT_ROR32(x, r) (((x) >> (r)) | ((x) << (32 - (r))))
#define ZT_ROL32(x, r) (((x) << (r)) | ((x) >> (32 - (r))))
#ifdef ZT_ARCH_X64
/**
 * x86-64 CPU feature flags probed once at startup (constructor defined elsewhere).
 *
 * The global constant CPUID below is the single shared instance.
 */
struct CPUIDRegisters
{
CPUIDRegisters() noexcept;
bool rdrand; // RDRAND instruction available
bool aes; // AES-NI available
bool avx;
bool vaes; // implies AVX
bool vpclmulqdq; // implies AVX
bool avx2;
bool avx512f;
bool sha; // SHA extensions
bool fsrm; // fast short REP MOVSB
};
extern const CPUIDRegisters CPUID;
#endif
/**
 * 256 zero bits / 32 zero bytes
 */
extern const uint64_t ZERO256[4];
/**
 * Hexadecimal characters 0-f
 */
extern const char HEXCHARS[16];
/**
 * A random integer generated at startup for Map's hash bucket calculation.
 */
extern const uint64_t s_mapNonce;
/**
 * Lock memory to prevent swapping out to secondary storage (if possible)
 *
 * This is used to attempt to prevent the swapping out of long-term stored secure
 * credentials like secret keys. It isn't supported on all platforms and may not
 * be absolutely guaranteed to work, but it's a countermeasure.
 *
 * Currently a no-op on Windows; elsewhere the mlock() return value is
 * deliberately ignored (best effort only).
 *
 * @param p Memory to lock
 * @param l Size of memory
 */
static ZT_INLINE void memoryLock(const void *const p, const unsigned int l) noexcept
{
#ifndef __WINDOWS__
mlock(p, l);
#endif
}
/**
 * Unlock memory locked with memoryLock()
 *
 * No-op on Windows; munlock() failures are ignored.
 *
 * @param p Memory to unlock
 * @param l Size of memory
 */
static ZT_INLINE void memoryUnlock(const void *const p, const unsigned int l) noexcept
{
#ifndef __WINDOWS__
munlock(p, l);
#endif
}
/**
 * Perform a time-invariant binary comparison
 *
 * Intended for comparing secrets: runtime does not depend on where (or
 * whether) the two buffers differ.
 *
 * @param a First binary string
 * @param b Second binary string
 * @param len Length of strings
 * @return True if strings are equal
 */
bool secureEq(const void *a, const void *b, unsigned int len) noexcept;
/**
 * Be absolutely sure to zero memory
 *
 * This uses some hacks to be totally sure the compiler does not optimize it out.
 *
 * @param ptr Memory to zero
 * @param len Length of memory in bytes
 */
void burn(void *ptr, unsigned int len);
/**
 * Convert an unsigned integer to a decimal string
 *
 * @param n Number to convert
 * @param s Buffer, at least 24 bytes in size
 * @return String containing 'n' in base 10 form
 */
char *decimal(unsigned long n, char s[24]) noexcept;
/**
 * Convert an unsigned integer into hex
 *
 * The returned pointer won't necessarily point to the start of 'buf', since
 * hex writing is done in reverse order (from the least significant digit).
 *
 * @param i Any unsigned integer
 * @param buf Buffer to receive hex, must be at least (2*sizeof(i))+1 in size or overflow will occur.
 * @return Pointer into buf containing hex string with trailing zero byte
 */
char *hex(uint64_t i, char buf[17]) noexcept;
/**
 * Decode an unsigned integer in hex format
 *
 * @param s String to decode, non-hex chars are ignored
 * @return Unsigned integer
 */
uint64_t unhex(const char *s) noexcept;
/**
 * Convert a byte array into hex
 *
 * @param d Bytes
 * @param l Length of bytes
 * @param s String buffer, must be at least (l*2)+1 in size or overflow will occur
 * @return Pointer to filled string buffer
 */
char *hex(const void *d, unsigned int l, char *s) noexcept;
/**
 * Decode a hex string
 *
 * @param h Hex C-string (non hex chars are ignored)
 * @param hlen Maximum length of string (will stop at terminating zero)
 * @param buf Output buffer
 * @param buflen Length of output buffer
 * @return Number of written bytes
 */
unsigned int unhex(const char *h, unsigned int hlen, void *buf, unsigned int buflen) noexcept;
/**
 * Generate secure random bytes
 *
 * This will try to use whatever OS sources of entropy are available. It's
 * guarded by an internal mutex so it's thread-safe.
 *
 * @param buf Buffer to fill
 * @param bytes Number of random bytes to generate
 */
void getSecureRandom(void *buf, unsigned int bytes) noexcept;
/**
 * @return Secure random 64-bit integer
 */
uint64_t getSecureRandomU64() noexcept;
/**
 * Encode string to base32
 *
 * @param data Binary data to encode
 * @param length Length of data in bytes
 * @param result Result buffer
 * @param bufSize Size of result buffer
 * @return Number of bytes written
 */
int b32e(const uint8_t *data, int length, char *result, int bufSize) noexcept;
/**
 * Decode base32 string
 *
 * @param encoded C-string in base32 format (non-base32 characters are ignored)
 * @param result Result buffer
 * @param bufSize Size of result buffer
 * @return Number of bytes written or -1 on error
 */
int b32d(const char *encoded, uint8_t *result, int bufSize) noexcept;
/**
 * Get a non-cryptographic random integer.
 *
 * This should never be used for cryptographic use cases, not even for choosing
 * message nonce/IV values if they should not repeat. It should only be used when
 * a fast and potentially "dirty" random source is needed.
 *
 * @return Non-cryptographic pseudo-random 64-bit integer
 */
uint64_t random() noexcept;
/**
 * Perform a safe C string copy, ALWAYS null-terminating the result
 *
 * This will never ever EVER result in dest[] not being null-terminated
 * regardless of any input parameter (other than len==0 which is invalid).
 *
 * @param dest Destination buffer (must not be NULL)
 * @param len Length of dest[] (if zero, false is returned and nothing happens)
 * @param src Source string (if NULL, dest will receive a zero-length string and true is returned)
 * @return True on success, false on overflow (buffer will still be 0-terminated)
 */
bool scopy(char *dest, unsigned int len, const char *src) noexcept;
  214. /**
  215. * Mix bits in a 64-bit integer (non-cryptographic, for hash tables)
  216. *
  217. * https://nullprogram.com/blog/2018/07/31/
  218. *
  219. * @param x Integer to mix
  220. * @return Hashed value
  221. */
  222. static ZT_INLINE uint64_t hash64(uint64_t x) noexcept
  223. {
  224. x ^= x >> 30U;
  225. x *= 0xbf58476d1ce4e5b9ULL;
  226. x ^= x >> 27U;
  227. x *= 0x94d049bb133111ebULL;
  228. x ^= x >> 31U;
  229. return x;
  230. }
  231. /**
  232. * Mix bits in a 32-bit integer (non-cryptographic, for hash tables)
  233. *
  234. * https://nullprogram.com/blog/2018/07/31/
  235. *
  236. * @param x Integer to mix
  237. * @return Hashed value
  238. */
  239. static ZT_INLINE uint32_t hash32(uint32_t x) noexcept
  240. {
  241. x ^= x >> 16U;
  242. x *= 0x7feb352dU;
  243. x ^= x >> 15U;
  244. x *= 0x846ca68bU;
  245. x ^= x >> 16U;
  246. return x;
  247. }
  248. /**
  249. * Check if a buffer's contents are all zero
  250. */
  251. static ZT_INLINE bool allZero(const void *const b, unsigned int l) noexcept
  252. {
  253. const uint8_t *p = reinterpret_cast<const uint8_t *>(b);
  254. #ifndef ZT_NO_UNALIGNED_ACCESS
  255. while (l >= 8) {
  256. if (*reinterpret_cast<const uint64_t *>(p) != 0)
  257. return false;
  258. p += 8;
  259. l -= 8;
  260. }
  261. #endif
  262. for (unsigned int i = 0; i < l; ++i) {
  263. if (reinterpret_cast<const uint8_t *>(p)[i] != 0)
  264. return false;
  265. }
  266. return true;
  267. }
/**
 * Wrapper around reentrant strtok functions, which differ in name by platform
 *
 * NOTE: like strtok(), this modifies the input string in place.
 *
 * @param str String to tokenize or NULL for subsequent calls
 * @param delim Delimiter
 * @param saveptr Pointer to pointer where function can save state
 * @return Next token or NULL if none
 */
static ZT_INLINE char *stok(char *str, const char *delim, char **saveptr) noexcept
{
#ifdef __WINDOWS__
return strtok_s(str,delim,saveptr);
#else
return strtok_r(str, delim, saveptr);
#endif
}
  284. static ZT_INLINE unsigned int strToUInt(const char *s) noexcept
  285. {
  286. return (unsigned int)strtoul(s, nullptr, 10);
  287. }
/**
 * Parse an unsigned 64-bit integer from a hexadecimal C string
 *
 * Parsing stops at the first non-hex character; no error is reported
 * (a string with no leading hex digits yields 0).
 *
 * @param s Null-terminated hex string
 * @return Parsed value
 */
static ZT_INLINE unsigned long long hexStrToU64(const char *s) noexcept
{
#ifdef __WINDOWS__
return (unsigned long long)_strtoui64(s,nullptr,16);
#else
return strtoull(s, nullptr, 16);
#endif
}
  296. /**
  297. * Compute 32-bit FNV-1a checksum
  298. *
  299. * See: http://www.isthe.com/chongo/tech/comp/fnv/
  300. *
  301. * @param data Data to checksum
  302. * @param len Length of data
  303. * @return FNV1a checksum
  304. */
  305. static ZT_INLINE uint32_t fnv1a32(const void *const data, const unsigned int len) noexcept
  306. {
  307. uint32_t h = 0x811c9dc5;
  308. const uint32_t p = 0x01000193;
  309. for (unsigned int i = 0; i < len; ++i)
  310. h = (h ^ (uint32_t)reinterpret_cast<const uint8_t *>(data)[i]) * p;
  311. return h;
  312. }
#ifdef __GNUC__
// Population count (number of set bits) via compiler builtins, one overload
// per unsigned width so typedef'd variants resolve unambiguously.
static ZT_INLINE unsigned int countBits(const uint8_t v) noexcept
{ return (unsigned int)__builtin_popcount((unsigned int)v); }
static ZT_INLINE unsigned int countBits(const uint16_t v) noexcept
{ return (unsigned int)__builtin_popcount((unsigned int)v); }
static ZT_INLINE unsigned int countBits(const uint32_t v) noexcept
{ return (unsigned int)__builtin_popcountl((unsigned long)v); }
static ZT_INLINE unsigned int countBits(const uint64_t v) noexcept
{ return (unsigned int)__builtin_popcountll((unsigned long long)v); }
#else
// Portable SWAR (SIMD-within-a-register) population count for any unsigned
// integer type; classic parallel bit-count from "Bit Twiddling Hacks".
template<typename T>
static ZT_INLINE unsigned int countBits(T v) noexcept
{
v = v - ((v >> 1) & (T)~(T)0/3);
v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);
v = (v + (v >> 4)) & (T)~(T)0/255*15;
return (unsigned int)((v * ((~((T)0))/((T)255))) >> ((sizeof(T) - 1) * 8));
}
#endif
/**
 * Unconditionally swap bytes regardless of host byte order
 *
 * @param n Integer to swap
 * @return Integer with bytes reversed
 */
static ZT_INLINE uint64_t swapBytes(const uint64_t n) noexcept
{
#ifdef __GNUC__
return __builtin_bswap64(n);
#else
#ifdef _MSC_VER
return (uint64_t)_byteswap_uint64((unsigned __int64)n);
#else
// Portable fallback: assemble the reversed value with masks and shifts.
return (
((n & 0x00000000000000ffULL) << 56) |
((n & 0x000000000000ff00ULL) << 40) |
((n & 0x0000000000ff0000ULL) << 24) |
((n & 0x00000000ff000000ULL) << 8) |
((n & 0x000000ff00000000ULL) >> 8) |
((n & 0x0000ff0000000000ULL) >> 24) |
((n & 0x00ff000000000000ULL) >> 40) |
((n & 0xff00000000000000ULL) >> 56)
);
#endif
#endif
}
  359. /**
  360. * Unconditionally swap bytes regardless of host byte order
  361. *
  362. * @param n Integer to swap
  363. * @return Integer with bytes reversed
  364. */
  365. static ZT_INLINE uint32_t swapBytes(const uint32_t n) noexcept
  366. {
  367. #if defined(__GNUC__)
  368. return __builtin_bswap32(n);
  369. #else
  370. #ifdef _MSC_VER
  371. return (uint32_t)_byteswap_ulong((unsigned long)n);
  372. #else
  373. return htonl(n);
  374. #endif
  375. #endif
  376. }
  377. /**
  378. * Unconditionally swap bytes regardless of host byte order
  379. *
  380. * @param n Integer to swap
  381. * @return Integer with bytes reversed
  382. */
  383. static ZT_INLINE uint16_t swapBytes(const uint16_t n) noexcept
  384. {
  385. #if defined(__GNUC__)
  386. return __builtin_bswap16(n);
  387. #else
  388. #ifdef _MSC_VER
  389. return (uint16_t)_byteswap_ushort((unsigned short)n);
  390. #else
  391. return htons(n);
  392. #endif
  393. #endif
  394. }
// These are helper adapters to load and swap integer types special cased by size
// to work with all typedef'd variants, signed/unsigned, etc.
//
// _swap_bytes_bysize<I, sizeof(I)>::s(n) — byte-swap n (identity for 1 byte).
template< typename I, unsigned int S >
class _swap_bytes_bysize;
template< typename I >
class _swap_bytes_bysize< I, 1 >
{
public:
static ZT_INLINE I s(const I n) noexcept
{ return n; }
};
template< typename I >
class _swap_bytes_bysize< I, 2 >
{
public:
static ZT_INLINE I s(const I n) noexcept
{ return (I)swapBytes((uint16_t)n); }
};
template< typename I >
class _swap_bytes_bysize< I, 4 >
{
public:
static ZT_INLINE I s(const I n) noexcept
{ return (I)swapBytes((uint32_t)n); }
};
template< typename I >
class _swap_bytes_bysize< I, 8 >
{
public:
static ZT_INLINE I s(const I n) noexcept
{ return (I)swapBytes((uint64_t)n); }
};
// _load_be_bysize<I, sizeof(I)>::l(p) — assemble I from big-endian bytes,
// one byte at a time (safe for unaligned input).
template< typename I, unsigned int S >
class _load_be_bysize;
template< typename I >
class _load_be_bysize< I, 1 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return p[0]; }
};
template< typename I >
class _load_be_bysize< I, 2 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)(((unsigned int)p[0] << 8U) | (unsigned int)p[1]); }
};
template< typename I >
class _load_be_bysize< I, 4 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)(((uint32_t)p[0] << 24U) | ((uint32_t)p[1] << 16U) | ((uint32_t)p[2] << 8U) | (uint32_t)p[3]); }
};
template< typename I >
class _load_be_bysize< I, 8 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)(((uint64_t)p[0] << 56U) | ((uint64_t)p[1] << 48U) | ((uint64_t)p[2] << 40U) | ((uint64_t)p[3] << 32U) | ((uint64_t)p[4] << 24U) | ((uint64_t)p[5] << 16U) | ((uint64_t)p[6] << 8U) | (uint64_t)p[7]); }
};
// _load_le_bysize<I, sizeof(I)>::l(p) — assemble I from little-endian bytes,
// one byte at a time (safe for unaligned input).
template< typename I, unsigned int S >
class _load_le_bysize;
template< typename I >
class _load_le_bysize< I, 1 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return p[0]; }
};
template< typename I >
class _load_le_bysize< I, 2 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)((unsigned int)p[0] | ((unsigned int)p[1] << 8U)); }
};
template< typename I >
class _load_le_bysize< I, 4 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)((uint32_t)p[0] | ((uint32_t)p[1] << 8U) | ((uint32_t)p[2] << 16U) | ((uint32_t)p[3] << 24U)); }
};
template< typename I >
class _load_le_bysize< I, 8 >
{
public:
static ZT_INLINE I l(const uint8_t *const p) noexcept
{ return (I)((uint64_t)p[0] | ((uint64_t)p[1] << 8U) | ((uint64_t)p[2] << 16U) | ((uint64_t)p[3] << 24U) | ((uint64_t)p[4] << 32U) | ((uint64_t)p[5] << 40U) | ((uint64_t)p[6] << 48U) | ((uint64_t)p[7]) << 56U); }
};
/**
 * Convert any signed or unsigned integer type to big-endian ("network") byte order
 *
 * A no-op on big-endian hosts; dispatches to the size-specialized swap helper
 * on little-endian hosts.
 *
 * @tparam I Integer type (usually inferred)
 * @param n Value to convert
 * @return Value in big-endian order
 */
template< typename I >
static ZT_INLINE I hton(const I n) noexcept
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
return n;
#endif
}
/**
 * Convert any signed or unsigned integer type to host byte order from big-endian ("network") byte order
 *
 * Byte swapping is an involution, so this is the same operation as hton().
 *
 * @tparam I Integer type (usually inferred)
 * @param n Value to convert
 * @return Value in host byte order
 */
template< typename I >
static ZT_INLINE I ntoh(const I n) noexcept
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
return n;
#endif
}
  519. /**
  520. * Copy bits from memory into an integer type without modifying their order
  521. *
  522. * @tparam I Type to load
  523. * @param p Byte stream, must be at least sizeof(I) in size
  524. * @return Loaded raw integer
  525. */
  526. template< typename I >
  527. static ZT_INLINE I loadAsIsEndian(const void *const p) noexcept
  528. {
  529. #ifdef ZT_NO_UNALIGNED_ACCESS
  530. I tmp;
  531. for(int i=0;i<(int)sizeof(I);++i)
  532. reinterpret_cast<uint8_t *>(&tmp)[i] = reinterpret_cast<const uint8_t *>(p)[i];
  533. return tmp;
  534. #else
  535. return *reinterpret_cast<const I *>(p);
  536. #endif
  537. }
  538. /**
  539. * Copy bits from memory into an integer type without modifying their order
  540. *
  541. * @tparam I Type to store
  542. * @param p Byte array (must be at least sizeof(I))
  543. * @param i Integer to store
  544. */
  545. template< typename I >
  546. static ZT_INLINE void storeAsIsEndian(void *const p, const I i) noexcept
  547. {
  548. #ifdef ZT_NO_UNALIGNED_ACCESS
  549. for(unsigned int k=0;k<sizeof(I);++k)
  550. reinterpret_cast<uint8_t *>(p)[k] = reinterpret_cast<const uint8_t *>(&i)[k];
  551. #else
  552. *reinterpret_cast<I *>(p) = i;
  553. #endif
  554. }
/**
 * Decode a big-endian value from a byte stream
 *
 * @tparam I Type to decode (should be unsigned e.g. uint32_t or uint64_t)
 * @param p Byte stream, must be at least sizeof(I) in size
 * @return Decoded integer
 */
template< typename I >
static ZT_INLINE I loadBigEndian(const void *const p) noexcept
{
#ifdef ZT_NO_UNALIGNED_ACCESS
// Byte-wise assembly: safe when unaligned word loads are not permitted.
return _load_be_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
// Word load plus conditional swap.
return ntoh(*reinterpret_cast<const I *>(p));
#endif
}
/**
 * Save an integer in big-endian format
 *
 * @tparam I Integer type to store (usually inferred)
 * @param p Byte stream to write (must be at least sizeof(I))
 * @param i Integer to write
 */
template< typename I >
static ZT_INLINE void storeBigEndian(void *const p, I i) noexcept
{
#ifdef ZT_NO_UNALIGNED_ACCESS
storeAsIsEndian(p,hton(i));
#else
*reinterpret_cast<I *>(p) = hton(i);
#endif
}
/**
 * Decode a little-endian value from a byte stream
 *
 * @tparam I Type to decode
 * @param p Byte stream, must be at least sizeof(I) in size
 * @return Decoded integer
 */
template< typename I >
static ZT_INLINE I loadLittleEndian(const void *const p) noexcept
{
#if __BYTE_ORDER == __BIG_ENDIAN || defined(ZT_NO_UNALIGNED_ACCESS)
return _load_le_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
// Little-endian host with unaligned access allowed: direct load, no swap.
return *reinterpret_cast<const I *>(p);
#endif
}
/**
 * Save an integer in little-endian format
 *
 * @tparam I Integer type to store (usually inferred)
 * @param p Byte stream to write (must be at least sizeof(I))
 * @param i Integer to write
 */
template< typename I >
static ZT_INLINE void storeLittleEndian(void *const p, const I i) noexcept
{
#if __BYTE_ORDER == __BIG_ENDIAN
storeAsIsEndian(p,_swap_bytes_bysize<I,sizeof(I)>::s(i));
#else
#ifdef ZT_NO_UNALIGNED_ACCESS
storeAsIsEndian(p,i);
#else
*reinterpret_cast<I *>(p) = i;
#endif
#endif
}
/**
 * Copy memory block whose size is known at compile time.
 *
 * On x86-64 this copies in 64/32/16-byte chunks with unaligned SSE2
 * loads/stores, then finishes with 8/4/2/1-byte tail moves; every size test
 * is against the compile-time constant L, so dead branches are eliminated.
 * On other architectures it is a plain memcpy().
 *
 * @tparam L Size of memory
 * @param dest Destination memory
 * @param src Source memory
 */
template< unsigned int L >
static ZT_INLINE void copy(void *const dest, const void *const src) noexcept
{
#ifdef ZT_ARCH_X64
// NOTE(review): 'volatile' qualifies the destination pointer itself —
// presumably to keep the optimizer from collapsing this loop back into a
// memcpy() call; confirm intent before changing.
uint8_t *volatile d = reinterpret_cast<uint8_t *>(dest);
const uint8_t *s = reinterpret_cast<const uint8_t *>(src);
// 64-byte chunks: four 16-byte unaligned vector moves per iteration.
for (unsigned int i = 0; i < (L >> 6U); ++i) {
__m128i x0 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s));
__m128i x1 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s + 16));
__m128i x2 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s + 32));
__m128i x3 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s + 48));
s += 64;
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), x0);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 16), x1);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 32), x2);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 48), x3);
d += 64;
}
// Remaining tail: at most one step of each power-of-two size below 64.
if ((L & 32U) != 0) {
__m128i x0 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s));
__m128i x1 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s + 16));
s += 32;
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), x0);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 16), x1);
d += 32;
}
if ((L & 16U) != 0) {
__m128i x0 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(s));
s += 16;
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), x0);
d += 16;
}
if ((L & 8U) != 0) {
*reinterpret_cast<volatile uint64_t *>(d) = *reinterpret_cast<const uint64_t *>(s);
s += 8;
d += 8;
}
if ((L & 4U) != 0) {
*reinterpret_cast<volatile uint32_t *>(d) = *reinterpret_cast<const uint32_t *>(s);
s += 4;
d += 4;
}
if ((L & 2U) != 0) {
*reinterpret_cast<volatile uint16_t *>(d) = *reinterpret_cast<const uint16_t *>(s);
s += 2;
d += 2;
}
if ((L & 1U) != 0) {
*d = *s;
}
#else
memcpy(dest,src,L);
#endif
}
  684. /**
  685. * Copy memory block whose size is known at run time
  686. *
  687. * @param dest Destination memory
  688. * @param src Source memory
  689. * @param len Bytes to copy
  690. */
  691. static ZT_INLINE void copy(void *const dest, const void *const src, unsigned int len) noexcept
  692. { memcpy(dest, src, len); }
/**
 * Zero memory block whose size is known at compile time
 *
 * On x86-64 this stores zero in 64/32/16-byte vector chunks, then finishes
 * with 8/4/2/1-byte tail stores; every size test is against the compile-time
 * constant L, so dead branches are eliminated. Elsewhere it is memset().
 *
 * @tparam L Size in bytes
 * @param dest Memory to zero
 */
template< unsigned int L >
static ZT_INLINE void zero(void *const dest) noexcept
{
#ifdef ZT_ARCH_X64
// NOTE(review): 'volatile' qualifies the destination pointer itself —
// presumably to keep the optimizer from collapsing this into a memset()
// call; confirm intent before changing.
uint8_t *volatile d = reinterpret_cast<uint8_t *>(dest);
__m128i z = _mm_setzero_si128();
// 64-byte chunks: four 16-byte unaligned vector stores per iteration.
for (unsigned int i = 0; i < (L >> 6U); ++i) {
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), z);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 16), z);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 32), z);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 48), z);
d += 64;
}
// Remaining tail: at most one step of each power-of-two size below 64.
if ((L & 32U) != 0) {
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), z);
_mm_storeu_si128(reinterpret_cast<__m128i *>(d + 16), z);
d += 32;
}
if ((L & 16U) != 0) {
_mm_storeu_si128(reinterpret_cast<__m128i *>(d), z);
d += 16;
}
if ((L & 8U) != 0) {
*reinterpret_cast<volatile uint64_t *>(d) = 0;
d += 8;
}
if ((L & 4U) != 0) {
*reinterpret_cast<volatile uint32_t *>(d) = 0;
d += 4;
}
if ((L & 2U) != 0) {
*reinterpret_cast<volatile uint16_t *>(d) = 0;
d += 2;
}
if ((L & 1U) != 0) {
*d = 0;
}
#else
memset(dest,0,L);
#endif
}
  740. /**
  741. * Zero memory block whose size is known at run time
  742. *
  743. * @param dest Memory to zero
  744. * @param len Size in bytes
  745. */
  746. static ZT_INLINE void zero(void *const dest, const unsigned int len) noexcept
  747. { memset(dest, 0, len); }
  748. /**
  749. * Simple malloc/free based C++ STL allocator.
  750. *
  751. * This is used to make sure our containers don't use weird libc++
  752. * allocators but instead use whatever malloc() is, which in turn
  753. * can be overridden by things like jemaclloc or tcmalloc.
  754. *
  755. * @tparam T Allocated type
  756. */
  757. template< typename T >
  758. struct Mallocator
  759. {
  760. typedef size_t size_type;
  761. typedef ptrdiff_t difference_type;
  762. typedef T *pointer;
  763. typedef const T *const_pointer;
  764. typedef T &reference;
  765. typedef const T &const_reference;
  766. typedef T value_type;
  767. template< class U >
  768. struct rebind
  769. {
  770. typedef Mallocator< U > other;
  771. };
  772. ZT_INLINE Mallocator() noexcept
  773. {}
  774. ZT_INLINE Mallocator(const Mallocator &) noexcept
  775. {}
  776. template< class U >
  777. ZT_INLINE Mallocator(const Mallocator< U > &) noexcept
  778. {}
  779. ZT_INLINE ~Mallocator() noexcept
  780. {}
  781. ZT_INLINE pointer allocate(size_type s, void const * = nullptr)
  782. {
  783. if (0 == s)
  784. return nullptr;
  785. pointer temp = (pointer)malloc(s * sizeof(T));
  786. if (temp == nullptr)
  787. throw std::bad_alloc();
  788. return temp;
  789. }
  790. ZT_INLINE pointer address(reference x) const
  791. { return &x; }
  792. ZT_INLINE const_pointer address(const_reference x) const
  793. { return &x; }
  794. ZT_INLINE void deallocate(pointer p, size_type)
  795. { free(p); }
  796. ZT_INLINE size_type max_size() const noexcept
  797. { return std::numeric_limits< size_t >::max() / sizeof(T); }
  798. ZT_INLINE void construct(pointer p, const T &val)
  799. { new((void *)p) T(val); }
  800. ZT_INLINE void destroy(pointer p)
  801. { p->~T(); }
  802. constexpr bool operator==(const Mallocator &) const noexcept
  803. { return true; }
  804. constexpr bool operator!=(const Mallocator &) const noexcept
  805. { return false; }
  806. };
  807. } // namespace Utils
  808. } // namespace ZeroTier
  809. #endif