/*
 * Copyright (c)2019 ZeroTier, Inc.
 *
 * Use of this software is governed by the Business Source License included
 * in the LICENSE.TXT file in the project's root directory.
 *
 * Change Date: 2025-01-01
 *
 * On the date above, in accordance with the Business Source License, use
 * of this software will be governed by version 2.0 of the Apache License.
 */
/****/

#ifndef ZT_UTILS_HPP
#define ZT_UTILS_HPP

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <time.h>

#include <string>
#include <stdexcept>
#include <vector>
#include <map>

#if defined(__FreeBSD__)
#include <sys/endian.h>
#endif

#include "Constants.hpp"

#if __BYTE_ORDER == __LITTLE_ENDIAN
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)((uint16_t)((uint16_t)(x) << 8U) | (uint16_t)((uint16_t)(x) >> 8U)))
#define ZT_CONST_TO_BE_UINT64(x) ( \
    (((uint64_t)(x) & 0x00000000000000ffULL) << 56U) | \
    (((uint64_t)(x) & 0x000000000000ff00ULL) << 40U) | \
    (((uint64_t)(x) & 0x0000000000ff0000ULL) << 24U) | \
    (((uint64_t)(x) & 0x00000000ff000000ULL) << 8U) | \
    (((uint64_t)(x) & 0x000000ff00000000ULL) >> 8U) | \
    (((uint64_t)(x) & 0x0000ff0000000000ULL) >> 24U) | \
    (((uint64_t)(x) & 0x00ff000000000000ULL) >> 40U) | \
    (((uint64_t)(x) & 0xff00000000000000ULL) >> 56U))
#else
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)(x))
#define ZT_CONST_TO_BE_UINT64(x) ((uint64_t)(x))
#endif

#define ZT_ROR64(x, r) (((x) >> (r)) | ((x) << (64 - (r))))
#define ZT_ROL64(x, r) (((x) << (r)) | ((x) >> (64 - (r))))
#define ZT_ROR32(x, r) (((x) >> (r)) | ((x) << (32 - (r))))
#define ZT_ROL32(x, r) (((x) << (r)) | ((x) >> (32 - (r))))
namespace ZeroTier {

/**
 * Miscellaneous utility functions and global constants
 */
class Utils
{
public:
    static const uint64_t ZERO256[4];

#ifdef ZT_ARCH_ARM_HAS_NEON
    struct ARMCapabilities
    {
        ARMCapabilities() noexcept;

        bool aes;
        bool crc32;
        bool pmull;
        bool sha1;
        bool sha2;
    };
    static const ARMCapabilities ARMCAP;
#endif

#ifdef ZT_ARCH_X64
    struct CPUIDRegisters
    {
        CPUIDRegisters() noexcept;

        bool rdrand;
        bool aes;
        bool avx;
        bool vaes; // implies AVX
        bool vpclmulqdq; // implies AVX
        bool avx2;
        bool avx512f;
        bool sha;
        bool fsrm;
    };
    static const CPUIDRegisters CPUID;
#endif

    /**
     * Compute the log2 (most significant bit set) of a 32-bit integer
     *
     * @param v Integer to compute
     * @return log2 or 0 if v is 0
     */
    static inline unsigned int log2(uint32_t v)
    {
        uint32_t r = (v > 0xffff) << 4; v >>= r;
        uint32_t shift = (v > 0xff) << 3; v >>= shift; r |= shift;
        shift = (v > 0xf) << 2; v >>= shift; r |= shift;
        shift = (v > 0x3) << 1; v >>= shift; r |= shift;
        r |= (v >> 1);
        return (unsigned int)r;
    }

    /**
     * Perform a time-invariant binary comparison
     *
     * @param a First binary string
     * @param b Second binary string
     * @param len Length of strings
     * @return True if strings are equal
     */
    static inline bool secureEq(const void *a,const void *b,unsigned int len)
    {
        uint8_t diff = 0;
        for(unsigned int i=0;i<len;++i)
            diff |= ( (reinterpret_cast<const uint8_t *>(a))[i] ^ (reinterpret_cast<const uint8_t *>(b))[i] );
        return (diff == 0);
    }
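    // Usage sketch (hypothetical caller code): compare two 16-byte
    // authentication tags without leaking timing information.
    //
    //   uint8_t expected[16], received[16];
    //   // ... fill both tags ...
    //   bool match = Utils::secureEq(expected, received, 16);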
    /**
     * Securely zero memory, avoiding compiler optimizations and such
     */
    static void burn(void *ptr,unsigned int len);

    /**
     * @param n Number to convert
     * @param s Buffer, at least 24 bytes in size
     * @return String containing 'n' in base 10 form
     */
    static char *decimal(unsigned long n,char s[24]);

    static inline char *hex(uint64_t i,char s[17])
    {
        s[0] = HEXCHARS[(i >> 60) & 0xf];
        s[1] = HEXCHARS[(i >> 56) & 0xf];
        s[2] = HEXCHARS[(i >> 52) & 0xf];
        s[3] = HEXCHARS[(i >> 48) & 0xf];
        s[4] = HEXCHARS[(i >> 44) & 0xf];
        s[5] = HEXCHARS[(i >> 40) & 0xf];
        s[6] = HEXCHARS[(i >> 36) & 0xf];
        s[7] = HEXCHARS[(i >> 32) & 0xf];
        s[8] = HEXCHARS[(i >> 28) & 0xf];
        s[9] = HEXCHARS[(i >> 24) & 0xf];
        s[10] = HEXCHARS[(i >> 20) & 0xf];
        s[11] = HEXCHARS[(i >> 16) & 0xf];
        s[12] = HEXCHARS[(i >> 12) & 0xf];
        s[13] = HEXCHARS[(i >> 8) & 0xf];
        s[14] = HEXCHARS[(i >> 4) & 0xf];
        s[15] = HEXCHARS[i & 0xf];
        s[16] = (char)0;
        return s;
    }

    static inline char *hex10(uint64_t i,char s[11])
    {
        s[0] = HEXCHARS[(i >> 36) & 0xf];
        s[1] = HEXCHARS[(i >> 32) & 0xf];
        s[2] = HEXCHARS[(i >> 28) & 0xf];
        s[3] = HEXCHARS[(i >> 24) & 0xf];
        s[4] = HEXCHARS[(i >> 20) & 0xf];
        s[5] = HEXCHARS[(i >> 16) & 0xf];
        s[6] = HEXCHARS[(i >> 12) & 0xf];
        s[7] = HEXCHARS[(i >> 8) & 0xf];
        s[8] = HEXCHARS[(i >> 4) & 0xf];
        s[9] = HEXCHARS[i & 0xf];
        s[10] = (char)0;
        return s;
    }

    static inline char *hex(uint32_t i,char s[9])
    {
        s[0] = HEXCHARS[(i >> 28) & 0xf];
        s[1] = HEXCHARS[(i >> 24) & 0xf];
        s[2] = HEXCHARS[(i >> 20) & 0xf];
        s[3] = HEXCHARS[(i >> 16) & 0xf];
        s[4] = HEXCHARS[(i >> 12) & 0xf];
        s[5] = HEXCHARS[(i >> 8) & 0xf];
        s[6] = HEXCHARS[(i >> 4) & 0xf];
        s[7] = HEXCHARS[i & 0xf];
        s[8] = (char)0;
        return s;
    }

    static inline char *hex(uint16_t i,char s[5])
    {
        s[0] = HEXCHARS[(i >> 12) & 0xf];
        s[1] = HEXCHARS[(i >> 8) & 0xf];
        s[2] = HEXCHARS[(i >> 4) & 0xf];
        s[3] = HEXCHARS[i & 0xf];
        s[4] = (char)0;
        return s;
    }

    static inline char *hex(uint8_t i,char s[3])
    {
        s[0] = HEXCHARS[(i >> 4) & 0xf];
        s[1] = HEXCHARS[i & 0xf];
        s[2] = (char)0;
        return s;
    }

    static inline char *hex(const void *d,unsigned int l,char *s)
    {
        char *const save = s;
        for(unsigned int i=0;i<l;++i) {
            const unsigned int b = reinterpret_cast<const uint8_t *>(d)[i];
            *(s++) = HEXCHARS[b >> 4];
            *(s++) = HEXCHARS[b & 0xf];
        }
        *s = (char)0;
        return save;
    }

    static inline unsigned int unhex(const char *h,void *buf,unsigned int buflen)
    {
        unsigned int l = 0;
        while (l < buflen) {
            uint8_t hc = *(reinterpret_cast<const uint8_t *>(h++));
            if (!hc) break;
            uint8_t c = 0;
            if ((hc >= 48)&&(hc <= 57)) // 0..9
                c = hc - 48;
            else if ((hc >= 97)&&(hc <= 102)) // a..f
                c = hc - 87;
            else if ((hc >= 65)&&(hc <= 70)) // A..F
                c = hc - 55;
            hc = *(reinterpret_cast<const uint8_t *>(h++));
            if (!hc) break;
            c <<= 4;
            if ((hc >= 48)&&(hc <= 57))
                c |= hc - 48;
            else if ((hc >= 97)&&(hc <= 102))
                c |= hc - 87;
            else if ((hc >= 65)&&(hc <= 70))
                c |= hc - 55;
            reinterpret_cast<uint8_t *>(buf)[l++] = c;
        }
        return l;
    }

    static inline unsigned int unhex(const char *h,unsigned int hlen,void *buf,unsigned int buflen)
    {
        unsigned int l = 0;
        const char *hend = h + hlen;
        while (l < buflen) {
            if (h == hend) break;
            uint8_t hc = *(reinterpret_cast<const uint8_t *>(h++));
            if (!hc) break;
            uint8_t c = 0;
            if ((hc >= 48)&&(hc <= 57))
                c = hc - 48;
            else if ((hc >= 97)&&(hc <= 102))
                c = hc - 87;
            else if ((hc >= 65)&&(hc <= 70))
                c = hc - 55;
            if (h == hend) break;
            hc = *(reinterpret_cast<const uint8_t *>(h++));
            if (!hc) break;
            c <<= 4;
            if ((hc >= 48)&&(hc <= 57))
                c |= hc - 48;
            else if ((hc >= 97)&&(hc <= 102))
                c |= hc - 87;
            else if ((hc >= 65)&&(hc <= 70))
                c |= hc - 55;
            reinterpret_cast<uint8_t *>(buf)[l++] = c;
        }
        return l;
    }
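    // Usage sketch (hypothetical caller code): hex() needs 2*len+1 bytes of
    // output space and unhex() returns the number of bytes actually decoded.
    //
    //   uint8_t raw[4] = { 0xde, 0xad, 0xbe, 0xef };
    //   char str[9];
    //   Utils::hex(raw, 4, str);                                 // str == "deadbeef"
    //   uint8_t back[4];
    //   unsigned int n = Utils::unhex(str, back, sizeof(back));  // n == 4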
    static inline float normalize(float value, float bigMin, float bigMax, float targetMin, float targetMax)
    {
        float bigSpan = bigMax - bigMin;
        float smallSpan = targetMax - targetMin;
        float valueScaled = (value - bigMin) / bigSpan;
        return targetMin + valueScaled * smallSpan;
    }

    /**
     * Generate secure random bytes
     *
     * This will try to use whatever OS sources of entropy are available. It's
     * guarded by an internal mutex so it's thread-safe.
     *
     * @param buf Buffer to fill
     * @param bytes Number of random bytes to generate
     */
    static void getSecureRandom(void *buf,unsigned int bytes);
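    // Usage sketch (hypothetical caller code): fill a 32-byte symmetric key
    // with OS-provided entropy; the buffer size is the caller's choice.
    //
    //   uint8_t key[32];
    //   Utils::getSecureRandom(key, sizeof(key));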
    /**
     * Tokenize a string (alias for strtok_r or strtok_s depending on platform)
     *
     * @param str String to split
     * @param delim Delimiters
     * @param saveptr Pointer to a char * for temporary reentrant storage
     */
    static inline char *stok(char *str,const char *delim,char **saveptr)
    {
#ifdef __WINDOWS__
        return strtok_s(str,delim,saveptr);
#else
        return strtok_r(str,delim,saveptr);
#endif
    }

    static inline unsigned int strToUInt(const char *s) { return (unsigned int)strtoul(s,(char **)0,10); }
    static inline int strToInt(const char *s) { return (int)strtol(s,(char **)0,10); }
    static inline unsigned long strToULong(const char *s) { return strtoul(s,(char **)0,10); }
    static inline long strToLong(const char *s) { return strtol(s,(char **)0,10); }
    static inline double strToDouble(const char *s) { return strtod(s,NULL); }

    static inline unsigned long long strToU64(const char *s)
    {
#ifdef __WINDOWS__
        return (unsigned long long)_strtoui64(s,(char **)0,10);
#else
        return strtoull(s,(char **)0,10);
#endif
    }

    static inline long long strTo64(const char *s)
    {
#ifdef __WINDOWS__
        return (long long)_strtoi64(s,(char **)0,10);
#else
        return strtoll(s,(char **)0,10);
#endif
    }

    static inline unsigned int hexStrToUInt(const char *s) { return (unsigned int)strtoul(s,(char **)0,16); }
    static inline int hexStrToInt(const char *s) { return (int)strtol(s,(char **)0,16); }
    static inline unsigned long hexStrToULong(const char *s) { return strtoul(s,(char **)0,16); }
    static inline long hexStrToLong(const char *s) { return strtol(s,(char **)0,16); }

    static inline unsigned long long hexStrToU64(const char *s)
    {
#ifdef __WINDOWS__
        return (unsigned long long)_strtoui64(s,(char **)0,16);
#else
        return strtoull(s,(char **)0,16);
#endif
    }

    static inline long long hexStrTo64(const char *s)
    {
#ifdef __WINDOWS__
        return (long long)_strtoi64(s,(char **)0,16);
#else
        return strtoll(s,(char **)0,16);
#endif
    }

    /**
     * Perform a safe C string copy, ALWAYS null-terminating the result
     *
     * This will never ever EVER result in dest[] not being null-terminated
     * regardless of any input parameter (other than len==0 which is invalid).
     *
     * @param dest Destination buffer (must not be NULL)
     * @param len Length of dest[] (if zero, false is returned and nothing happens)
     * @param src Source string (if NULL, dest will receive a zero-length string and true is returned)
     * @return True on success, false on overflow (buffer will still be 0-terminated)
     */
    static inline bool scopy(char *dest,unsigned int len,const char *src)
    {
        if (!len)
            return false; // sanity check
        if (!src) {
            *dest = (char)0;
            return true;
        }
        char *end = dest + len;
        while ((*dest++ = *src++)) {
            if (dest == end) {
                *(--dest) = (char)0;
                return false;
            }
        }
        return true;
    }
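    // Usage sketch (hypothetical caller code): the return value reports
    // truncation, but dest is null-terminated either way.
    //
    //   char name[8];
    //   bool ok = Utils::scopy(name, sizeof(name), "a-rather-long-string");
    //   // ok == false, name == "a-rathe" (7 characters plus terminating null)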
    /**
     * Count the number of bits set in an integer
     *
     * @param v 32-bit integer
     * @return Number of bits set in this integer (0-32)
     */
    static inline uint32_t countBits(uint32_t v)
    {
        v = v - ((v >> 1) & (uint32_t)0x55555555);
        v = (v & (uint32_t)0x33333333) + ((v >> 2) & (uint32_t)0x33333333);
        return ((((v + (v >> 4)) & (uint32_t)0xF0F0F0F) * (uint32_t)0x1010101) >> 24);
    }

    /**
     * Count the number of bits set in an integer
     *
     * @param v 64-bit integer
     * @return Number of bits set in this integer (0-64)
     */
    static inline uint64_t countBits(uint64_t v)
    {
        v = v - ((v >> 1) & (uint64_t)~(uint64_t)0/3);
        v = (v & (uint64_t)~(uint64_t)0/15*3) + ((v >> 2) & (uint64_t)~(uint64_t)0/15*3);
        v = (v + (v >> 4)) & (uint64_t)~(uint64_t)0/255*15;
        return (uint64_t)(v * ((uint64_t)~(uint64_t)0/255)) >> 56;
    }

    /**
     * Check if a memory buffer is all-zero
     *
     * @param p Memory to scan
     * @param len Length of memory
     * @return True if memory is all zero
     */
    static inline bool isZero(const void *p,unsigned int len)
    {
        for(unsigned int i=0;i<len;++i) {
            if (((const unsigned char *)p)[i])
                return false;
        }
        return true;
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint64_t swapBytes(const uint64_t n) noexcept
    {
#ifdef __GNUC__
        return __builtin_bswap64(n);
#else
#ifdef _MSC_VER
        return (uint64_t)_byteswap_uint64((unsigned __int64)n);
#else
        return (
            ((n & 0x00000000000000ffULL) << 56) |
            ((n & 0x000000000000ff00ULL) << 40) |
            ((n & 0x0000000000ff0000ULL) << 24) |
            ((n & 0x00000000ff000000ULL) << 8) |
            ((n & 0x000000ff00000000ULL) >> 8) |
            ((n & 0x0000ff0000000000ULL) >> 24) |
            ((n & 0x00ff000000000000ULL) >> 40) |
            ((n & 0xff00000000000000ULL) >> 56)
        );
#endif
#endif
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint32_t swapBytes(const uint32_t n) noexcept
    {
#if defined(__GNUC__)
        return __builtin_bswap32(n);
#else
#ifdef _MSC_VER
        return (uint32_t)_byteswap_ulong((unsigned long)n);
#else
        return htonl(n);
#endif
#endif
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint16_t swapBytes(const uint16_t n) noexcept
    {
#if defined(__GNUC__)
        return __builtin_bswap16(n);
#else
#ifdef _MSC_VER
        return (uint16_t)_byteswap_ushort((unsigned short)n);
#else
        return htons(n);
#endif
#endif
    }
    // These are helper adapters to load and swap integer types special cased by size
    // to work with all typedef'd variants, signed/unsigned, etc.

    template< typename I, unsigned int S >
    class _swap_bytes_bysize;

    template< typename I >
    class _swap_bytes_bysize< I, 1 >
    {
    public:
        static ZT_INLINE I s(const I n) noexcept
        { return n; }
    };

    template< typename I >
    class _swap_bytes_bysize< I, 2 >
    {
    public:
        static ZT_INLINE I s(const I n) noexcept
        { return (I)swapBytes((uint16_t)n); }
    };

    template< typename I >
    class _swap_bytes_bysize< I, 4 >
    {
    public:
        static ZT_INLINE I s(const I n) noexcept
        { return (I)swapBytes((uint32_t)n); }
    };

    template< typename I >
    class _swap_bytes_bysize< I, 8 >
    {
    public:
        static ZT_INLINE I s(const I n) noexcept
        { return (I)swapBytes((uint64_t)n); }
    };

    template< typename I, unsigned int S >
    class _load_be_bysize;

    template< typename I >
    class _load_be_bysize< I, 1 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return p[0]; }
    };

    template< typename I >
    class _load_be_bysize< I, 2 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)(((unsigned int)p[0] << 8U) | (unsigned int)p[1]); }
    };

    template< typename I >
    class _load_be_bysize< I, 4 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)(((uint32_t)p[0] << 24U) | ((uint32_t)p[1] << 16U) | ((uint32_t)p[2] << 8U) | (uint32_t)p[3]); }
    };

    template< typename I >
    class _load_be_bysize< I, 8 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)(((uint64_t)p[0] << 56U) | ((uint64_t)p[1] << 48U) | ((uint64_t)p[2] << 40U) | ((uint64_t)p[3] << 32U) | ((uint64_t)p[4] << 24U) | ((uint64_t)p[5] << 16U) | ((uint64_t)p[6] << 8U) | (uint64_t)p[7]); }
    };

    template< typename I, unsigned int S >
    class _load_le_bysize;

    template< typename I >
    class _load_le_bysize< I, 1 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return p[0]; }
    };

    template< typename I >
    class _load_le_bysize< I, 2 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)((unsigned int)p[0] | ((unsigned int)p[1] << 8U)); }
    };

    template< typename I >
    class _load_le_bysize< I, 4 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)((uint32_t)p[0] | ((uint32_t)p[1] << 8U) | ((uint32_t)p[2] << 16U) | ((uint32_t)p[3] << 24U)); }
    };

    template< typename I >
    class _load_le_bysize< I, 8 >
    {
    public:
        static ZT_INLINE I l(const uint8_t *const p) noexcept
        { return (I)((uint64_t)p[0] | ((uint64_t)p[1] << 8U) | ((uint64_t)p[2] << 16U) | ((uint64_t)p[3] << 24U) | ((uint64_t)p[4] << 32U) | ((uint64_t)p[5] << 40U) | ((uint64_t)p[6] << 48U) | ((uint64_t)p[7]) << 56U); }
    };

    /**
     * Convert any signed or unsigned integer type to big-endian ("network") byte order
     *
     * @tparam I Integer type (usually inferred)
     * @param n Value to convert
     * @return Value in big-endian order
     */
    template< typename I >
    static ZT_INLINE I hton(const I n) noexcept
    {
#if __BYTE_ORDER == __LITTLE_ENDIAN
        return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
        return n;
#endif
    }

    /**
     * Convert any signed or unsigned integer type to host byte order from big-endian ("network") byte order
     *
     * @tparam I Integer type (usually inferred)
     * @param n Value to convert
     * @return Value in host byte order
     */
    template< typename I >
    static ZT_INLINE I ntoh(const I n) noexcept
    {
#if __BYTE_ORDER == __LITTLE_ENDIAN
        return _swap_bytes_bysize< I, sizeof(I) >::s(n);
#else
        return n;
#endif
    }
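    // Usage sketch (hypothetical caller code): hton()/ntoh() infer the width
    // from the argument type, so the same call works for 16-, 32- and 64-bit values.
    //
    //   uint16_t portBE = Utils::hton((uint16_t)9993); // 0x2709 -> 0x0927 on little-endian hosts
    //   uint16_t port = Utils::ntoh(portBE);           // back to 9993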
    /**
     * Copy bits from memory into an integer type without modifying their order
     *
     * @tparam I Type to load
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Loaded raw integer
     */
    template< typename I >
    static ZT_INLINE I loadMachineEndian(const void *const p) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        I tmp;
        for(int i=0;i<(int)sizeof(I);++i)
            reinterpret_cast<uint8_t *>(&tmp)[i] = reinterpret_cast<const uint8_t *>(p)[i];
        return tmp;
#else
        return *reinterpret_cast<const I *>(p);
#endif
    }

    /**
     * Copy bits from an integer type into memory without modifying their order
     *
     * @tparam I Type to store
     * @param p Byte array (must be at least sizeof(I))
     * @param i Integer to store
     */
    template< typename I >
    static ZT_INLINE void storeMachineEndian(void *const p, const I i) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        for(unsigned int k=0;k<sizeof(I);++k)
            reinterpret_cast<uint8_t *>(p)[k] = reinterpret_cast<const uint8_t *>(&i)[k];
#else
        *reinterpret_cast<I *>(p) = i;
#endif
    }

    /**
     * Decode a big-endian value from a byte stream
     *
     * @tparam I Type to decode (should be unsigned e.g. uint32_t or uint64_t)
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Decoded integer
     */
    template< typename I >
    static ZT_INLINE I loadBigEndian(const void *const p) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        return _load_be_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
        return ntoh(*reinterpret_cast<const I *>(p));
#endif
    }

    /**
     * Save an integer in big-endian format
     *
     * @tparam I Integer type to store (usually inferred)
     * @param p Byte stream to write (must be at least sizeof(I))
     * @param i Integer to write
     */
    template< typename I >
    static ZT_INLINE void storeBigEndian(void *const p, I i) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        storeMachineEndian(p,hton(i));
#else
        *reinterpret_cast<I *>(p) = hton(i);
#endif
    }

    /**
     * Decode a little-endian value from a byte stream
     *
     * @tparam I Type to decode
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Decoded integer
     */
    template< typename I >
    static ZT_INLINE I loadLittleEndian(const void *const p) noexcept
    {
#if __BYTE_ORDER == __BIG_ENDIAN || defined(ZT_NO_UNALIGNED_ACCESS)
        return _load_le_bysize<I,sizeof(I)>::l(reinterpret_cast<const uint8_t *>(p));
#else
        return *reinterpret_cast<const I *>(p);
#endif
    }

    /**
     * Save an integer in little-endian format
     *
     * @tparam I Integer type to store (usually inferred)
     * @param p Byte stream to write (must be at least sizeof(I))
     * @param i Integer to write
     */
    template< typename I >
    static ZT_INLINE void storeLittleEndian(void *const p, const I i) noexcept
    {
#if __BYTE_ORDER == __BIG_ENDIAN
        storeMachineEndian(p,_swap_bytes_bysize<I,sizeof(I)>::s(i));
#else
#ifdef ZT_NO_UNALIGNED_ACCESS
        storeMachineEndian(p,i);
#else
        *reinterpret_cast<I *>(p) = i;
#endif
#endif
    }
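    // Usage sketch (hypothetical packet buffer): write and read a wire-format
    // field at a fixed offset without worrying about host byte order or alignment.
    //
    //   uint8_t pkt[16];
    //   Utils::storeBigEndian<uint32_t>(pkt + 4, 0x12345678); // bytes 12 34 56 78
    //   uint32_t v = Utils::loadBigEndian<uint32_t>(pkt + 4); // v == 0x12345678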
    /**
     * Copy memory block whose size is known at compile time
     *
     * @tparam L Size of memory
     * @param dest Destination memory
     * @param src Source memory
     */
    template< unsigned long L >
    static ZT_INLINE void copy(void *dest, const void *src) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        uintptr_t l = L;
        __asm__ __volatile__ ("cld ; rep movsb" : "+c"(l), "+S"(src), "+D"(dest) :: "memory");
#else
        memcpy(dest, src, L);
#endif
    }

    /**
     * Copy memory block whose size is known at run time
     *
     * @param dest Destination memory
     * @param src Source memory
     * @param len Bytes to copy
     */
    static ZT_INLINE void copy(void *dest, const void *src, unsigned long len) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        __asm__ __volatile__ ("cld ; rep movsb" : "+c"(len), "+S"(src), "+D"(dest) :: "memory");
#else
        memcpy(dest, src, len);
#endif
    }

    /**
     * Zero memory block whose size is known at compile time
     *
     * @tparam L Size in bytes
     * @param dest Memory to zero
     */
    template< unsigned long L >
    static ZT_INLINE void zero(void *dest) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        uintptr_t l = L;
        __asm__ __volatile__ ("cld ; rep stosb" : "+c"(l), "+D"(dest) : "a"(0) : "memory");
#else
        memset(dest, 0, L);
#endif
    }

    /**
     * Zero memory block whose size is known at run time
     *
     * @param dest Memory to zero
     * @param len Size in bytes
     */
    static ZT_INLINE void zero(void *dest, unsigned long len) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        __asm__ __volatile__ ("cld ; rep stosb" : "+c"(len), "+D"(dest) : "a"(0) : "memory");
#else
        memset(dest, 0, len);
#endif
    }
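    // Usage sketch (hypothetical caller code): the templated forms take the
    // length as a compile-time constant, the others take it at run time.
    //
    //   uint8_t a[64], b[64];
    //   Utils::copy<64>(a, b); // copy b into a, size known at compile time
    //   Utils::zero(a, 64);    // zero a, size known at run time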
    /**
     * Hexadecimal characters 0-f
     */
    static const char HEXCHARS[16];
};

} // namespace ZeroTier

#endif