Utils.hpp

/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 *
 * (c) ZeroTier, Inc.
 * https://www.zerotier.com/
 */

#ifndef ZT_UTILS_HPP
#define ZT_UTILS_HPP

#include <algorithm>
#include <map>
#include <stdexcept>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <string>
#include <time.h>
#include <vector>

#if defined(__FreeBSD__)
#include <sys/endian.h>
#endif

#include "Constants.hpp"

#if __BYTE_ORDER == __LITTLE_ENDIAN
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)((uint16_t)((uint16_t)(x) << 8U) | (uint16_t)((uint16_t)(x) >> 8U)))
#define ZT_CONST_TO_BE_UINT64(x) \
    ((((uint64_t)(x) & 0x00000000000000ffULL) << 56U) | (((uint64_t)(x) & 0x000000000000ff00ULL) << 40U) | (((uint64_t)(x) & 0x0000000000ff0000ULL) << 24U) | (((uint64_t)(x) & 0x00000000ff000000ULL) << 8U) \
     | (((uint64_t)(x) & 0x000000ff00000000ULL) >> 8U) | (((uint64_t)(x) & 0x0000ff0000000000ULL) >> 24U) | (((uint64_t)(x) & 0x00ff000000000000ULL) >> 40U) | (((uint64_t)(x) & 0xff00000000000000ULL) >> 56U))
#else
#define ZT_CONST_TO_BE_UINT16(x) ((uint16_t)(x))
#define ZT_CONST_TO_BE_UINT64(x) ((uint64_t)(x))
#endif

#define ZT_ROR64(x, r) (((x) >> (r)) | ((x) << (64 - (r))))
#define ZT_ROL64(x, r) (((x) << (r)) | ((x) >> (64 - (r))))
#define ZT_ROR32(x, r) (((x) >> (r)) | ((x) << (32 - (r))))
#define ZT_ROL32(x, r) (((x) << (r)) | ((x) >> (32 - (r))))

namespace ZeroTier {

/**
 * Miscellaneous utility functions and global constants
 */
class Utils {
  public:
    static const uint64_t ZERO256[4];

#ifdef ZT_ARCH_ARM_HAS_NEON
    struct ARMCapabilities {
        ARMCapabilities() noexcept;

        bool aes;
        bool crc32;
        bool pmull;
        bool sha1;
        bool sha2;
    };
    static const ARMCapabilities ARMCAP;
#endif

#ifdef ZT_ARCH_X64
    struct CPUIDRegisters {
        CPUIDRegisters() noexcept;

        bool rdrand;
        bool aes;
        bool avx;
        bool vaes;         // implies AVX
        bool vpclmulqdq;   // implies AVX
        bool avx2;
        bool avx512f;
        bool sha;
        bool fsrm;
    };
    static const CPUIDRegisters CPUID;
#endif

    /**
     * Compute the log2 (most significant bit set) of a 32-bit integer
     *
     * @param v Integer to compute the log2 of
     * @return log2 of v, or 0 if v is 0
     */
    static inline unsigned int log2(uint32_t v)
    {
        uint32_t r = (v > 0xffff) << 4;
        v >>= r;
        uint32_t shift = (v > 0xff) << 3;
        v >>= shift;
        r |= shift;
        shift = (v > 0xf) << 2;
        v >>= shift;
        r |= shift;
        shift = (v > 0x3) << 1;
        v >>= shift;
        r |= shift;
        r |= (v >> 1);
        return (unsigned int)r;
    }
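
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   Utils::log2(1U);        // == 0
    //   Utils::log2(0x8000U);   // == 15 (index of the most significant set bit)
    //   Utils::log2(0U);        // == 0 by convention (see the doc comment above)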

    /**
     * Perform a time-invariant binary comparison
     *
     * @param a First binary string
     * @param b Second binary string
     * @param len Length of strings
     * @return True if strings are equal
     */
    static inline bool secureEq(const void* a, const void* b, unsigned int len)
    {
        uint8_t diff = 0;
        for (unsigned int i = 0; i < len; ++i) {
            diff |= ((reinterpret_cast<const uint8_t*>(a))[i] ^ (reinterpret_cast<const uint8_t*>(b))[i]);
        }
        return (diff == 0);
    }
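
    // Illustrative usage (hypothetical caller, not part of this header): compare
    // two authentication tags without an early exit that could leak timing:
    //
    //   uint8_t expected[16], received[16];
    //   // ... fill both tags ...
    //   if (Utils::secureEq(expected, received, 16)) {
    //       // tags match
    //   }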

    /**
     * Securely zero memory, avoiding compiler optimizations and such
     */
    static void burn(void* ptr, unsigned int len);

    /**
     * @param n Number to convert
     * @param s Buffer, at least 24 bytes in size
     * @return String containing 'n' in base 10 form
     */
    static char* decimal(unsigned long n, char s[24]);

    static inline char* hex(uint64_t i, char s[17])
    {
        s[0] = HEXCHARS[(i >> 60) & 0xf];
        s[1] = HEXCHARS[(i >> 56) & 0xf];
        s[2] = HEXCHARS[(i >> 52) & 0xf];
        s[3] = HEXCHARS[(i >> 48) & 0xf];
        s[4] = HEXCHARS[(i >> 44) & 0xf];
        s[5] = HEXCHARS[(i >> 40) & 0xf];
        s[6] = HEXCHARS[(i >> 36) & 0xf];
        s[7] = HEXCHARS[(i >> 32) & 0xf];
        s[8] = HEXCHARS[(i >> 28) & 0xf];
        s[9] = HEXCHARS[(i >> 24) & 0xf];
        s[10] = HEXCHARS[(i >> 20) & 0xf];
        s[11] = HEXCHARS[(i >> 16) & 0xf];
        s[12] = HEXCHARS[(i >> 12) & 0xf];
        s[13] = HEXCHARS[(i >> 8) & 0xf];
        s[14] = HEXCHARS[(i >> 4) & 0xf];
        s[15] = HEXCHARS[i & 0xf];
        s[16] = (char)0;
        return s;
    }

    static inline char* hex10(uint64_t i, char s[11])
    {
        s[0] = HEXCHARS[(i >> 36) & 0xf];
        s[1] = HEXCHARS[(i >> 32) & 0xf];
        s[2] = HEXCHARS[(i >> 28) & 0xf];
        s[3] = HEXCHARS[(i >> 24) & 0xf];
        s[4] = HEXCHARS[(i >> 20) & 0xf];
        s[5] = HEXCHARS[(i >> 16) & 0xf];
        s[6] = HEXCHARS[(i >> 12) & 0xf];
        s[7] = HEXCHARS[(i >> 8) & 0xf];
        s[8] = HEXCHARS[(i >> 4) & 0xf];
        s[9] = HEXCHARS[i & 0xf];
        s[10] = (char)0;
        return s;
    }

    static inline char* hex(uint32_t i, char s[9])
    {
        s[0] = HEXCHARS[(i >> 28) & 0xf];
        s[1] = HEXCHARS[(i >> 24) & 0xf];
        s[2] = HEXCHARS[(i >> 20) & 0xf];
        s[3] = HEXCHARS[(i >> 16) & 0xf];
        s[4] = HEXCHARS[(i >> 12) & 0xf];
        s[5] = HEXCHARS[(i >> 8) & 0xf];
        s[6] = HEXCHARS[(i >> 4) & 0xf];
        s[7] = HEXCHARS[i & 0xf];
        s[8] = (char)0;
        return s;
    }

    static inline char* hex(uint16_t i, char s[5])
    {
        s[0] = HEXCHARS[(i >> 12) & 0xf];
        s[1] = HEXCHARS[(i >> 8) & 0xf];
        s[2] = HEXCHARS[(i >> 4) & 0xf];
        s[3] = HEXCHARS[i & 0xf];
        s[4] = (char)0;
        return s;
    }

    static inline char* hex(uint8_t i, char s[3])
    {
        s[0] = HEXCHARS[(i >> 4) & 0xf];
        s[1] = HEXCHARS[i & 0xf];
        s[2] = (char)0;
        return s;
    }

    static inline char* hex(const void* d, unsigned int l, char* s)
    {
        char* const save = s;
        for (unsigned int i = 0; i < l; ++i) {
            const unsigned int b = reinterpret_cast<const uint8_t*>(d)[i];
            *(s++) = HEXCHARS[b >> 4];
            *(s++) = HEXCHARS[b & 0xf];
        }
        *s = (char)0;
        return save;
    }
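
    // Illustrative usage (hypothetical caller, not part of this header). The raw
    // memory overload writes 2*l hex digits plus a terminating null, so the output
    // buffer must hold at least (2 * l) + 1 bytes:
    //
    //   uint8_t id[5] = { 0xde, 0xad, 0xbe, 0xef, 0x01 };
    //   char tmp[11];
    //   printf("%s\n", Utils::hex(id, 5, tmp));   // "deadbeef01" (assuming lowercase HEXCHARS)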

    static inline unsigned int unhex(const char* h, void* buf, unsigned int buflen)
    {
        unsigned int l = 0;
        while (l < buflen) {
            uint8_t hc = *(reinterpret_cast<const uint8_t*>(h++));
            if (! hc) {
                break;
            }
            uint8_t c = 0;
            if ((hc >= 48) && (hc <= 57)) {   // 0..9
                c = hc - 48;
            }
            else if ((hc >= 97) && (hc <= 102)) {   // a..f
                c = hc - 87;
            }
            else if ((hc >= 65) && (hc <= 70)) {   // A..F
                c = hc - 55;
            }
            hc = *(reinterpret_cast<const uint8_t*>(h++));
            if (! hc) {
                break;
            }
            c <<= 4;
            if ((hc >= 48) && (hc <= 57)) {
                c |= hc - 48;
            }
            else if ((hc >= 97) && (hc <= 102)) {
                c |= hc - 87;
            }
            else if ((hc >= 65) && (hc <= 70)) {
                c |= hc - 55;
            }
            reinterpret_cast<uint8_t*>(buf)[l++] = c;
        }
        return l;
    }
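
    // Illustrative round trip (hypothetical caller, not part of this header):
    //
    //   uint8_t raw[4];
    //   unsigned int n = Utils::unhex("deadbeef", raw, sizeof(raw));   // n == 4
    //   char back[9];
    //   Utils::hex(raw, n, back);                                      // back == "deadbeef"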

    static inline unsigned int unhex(const char* h, unsigned int hlen, void* buf, unsigned int buflen)
    {
        unsigned int l = 0;
        const char* hend = h + hlen;
        while (l < buflen) {
            if (h == hend) {
                break;
            }
            uint8_t hc = *(reinterpret_cast<const uint8_t*>(h++));
            if (! hc) {
                break;
            }
            uint8_t c = 0;
            if ((hc >= 48) && (hc <= 57)) {
                c = hc - 48;
            }
            else if ((hc >= 97) && (hc <= 102)) {
                c = hc - 87;
            }
            else if ((hc >= 65) && (hc <= 70)) {
                c = hc - 55;
            }
            if (h == hend) {
                break;
            }
            hc = *(reinterpret_cast<const uint8_t*>(h++));
            if (! hc) {
                break;
            }
            c <<= 4;
            if ((hc >= 48) && (hc <= 57)) {
                c |= hc - 48;
            }
            else if ((hc >= 97) && (hc <= 102)) {
                c |= hc - 87;
            }
            else if ((hc >= 65) && (hc <= 70)) {
                c |= hc - 55;
            }
            reinterpret_cast<uint8_t*>(buf)[l++] = c;
        }
        return l;
    }

    static inline float normalize(float value, float bigMin, float bigMax, float targetMin, float targetMax)
    {
        float bigSpan = bigMax - bigMin;
        float smallSpan = targetMax - targetMin;
        float valueScaled = (value - bigMin) / bigSpan;
        return targetMin + valueScaled * smallSpan;
    }

    /**
     * Generate secure random bytes
     *
     * This will try to use whatever OS sources of entropy are available. It's
     * guarded by an internal mutex so it's thread-safe.
     *
     * @param buf Buffer to fill
     * @param bytes Number of random bytes to generate
     */
    static void getSecureRandom(void* buf, unsigned int bytes);
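
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   uint8_t key[32];
    //   Utils::getSecureRandom(key, sizeof(key));   // fill key with CSPRNG output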

    /**
     * Tokenize a string (alias for strtok_r or strtok_s depending on platform)
     *
     * @param str String to split
     * @param delim Delimiters
     * @param saveptr Pointer to a char * for temporary reentrant storage
     */
    static inline char* stok(char* str, const char* delim, char** saveptr)
    {
#ifdef __WINDOWS__
        return strtok_s(str, delim, saveptr);
#else
        return strtok_r(str, delim, saveptr);
#endif
    }
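
    // Illustrative usage (hypothetical caller, not part of this header); as with
    // strtok_r, the first call passes the string and later calls pass NULL:
    //
    //   char line[] = "eth0,eth1 wlan0";
    //   char* save = (char*)0;
    //   for (char* tok = Utils::stok(line, ", ", &save); tok; tok = Utils::stok((char*)0, ", ", &save)) {
    //       // tok -> "eth0", then "eth1", then "wlan0"
    //   }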

    static inline unsigned int strToUInt(const char* s)
    {
        return (unsigned int)strtoul(s, (char**)0, 10);
    }

    static inline int strToInt(const char* s)
    {
        return (int)strtol(s, (char**)0, 10);
    }

    static inline unsigned long strToULong(const char* s)
    {
        return strtoul(s, (char**)0, 10);
    }

    static inline long strToLong(const char* s)
    {
        return strtol(s, (char**)0, 10);
    }

    static inline double strToDouble(const char* s)
    {
        return strtod(s, NULL);
    }

    static inline unsigned long long strToU64(const char* s)
    {
#ifdef __WINDOWS__
        return (unsigned long long)_strtoui64(s, (char**)0, 10);
#else
        return strtoull(s, (char**)0, 10);
#endif
    }

    static inline long long strTo64(const char* s)
    {
#ifdef __WINDOWS__
        return (long long)_strtoi64(s, (char**)0, 10);
#else
        return strtoll(s, (char**)0, 10);
#endif
    }

    static inline unsigned int hexStrToUInt(const char* s)
    {
        return (unsigned int)strtoul(s, (char**)0, 16);
    }

    static inline int hexStrToInt(const char* s)
    {
        return (int)strtol(s, (char**)0, 16);
    }

    static inline unsigned long hexStrToULong(const char* s)
    {
        return strtoul(s, (char**)0, 16);
    }

    static inline long hexStrToLong(const char* s)
    {
        return strtol(s, (char**)0, 16);
    }

    static inline unsigned long long hexStrToU64(const char* s)
    {
#ifdef __WINDOWS__
        return (unsigned long long)_strtoui64(s, (char**)0, 16);
#else
        return strtoull(s, (char**)0, 16);
#endif
    }

    static inline long long hexStrTo64(const char* s)
    {
#ifdef __WINDOWS__
        return (long long)_strtoi64(s, (char**)0, 16);
#else
        return strtoll(s, (char**)0, 16);
#endif
    }

    /**
     * Perform a safe C string copy, ALWAYS null-terminating the result
     *
     * This will never ever EVER result in dest[] not being null-terminated
     * regardless of any input parameter (other than len==0 which is invalid).
     *
     * @param dest Destination buffer (must not be NULL)
     * @param len Length of dest[] (if zero, false is returned and nothing happens)
     * @param src Source string (if NULL, dest will receive a zero-length string and true is returned)
     * @return True on success, false on overflow (buffer will still be 0-terminated)
     */
    static inline bool scopy(char* dest, unsigned int len, const char* src)
    {
        if (! len) {
            return false;   // sanity check
        }
        if (! src) {
            *dest = (char)0;
            return true;
        }
        char* end = dest + len;
        while ((*dest++ = *src++)) {
            if (dest == end) {
                *(--dest) = (char)0;
                return false;
            }
        }
        return true;
    }
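
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   char name[8];
    //   bool ok = Utils::scopy(name, sizeof(name), "zerotier");
    //   // ok == false (source was truncated); name == "zerotie" plus the null terminator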

    /**
     * Count the number of bits set in an integer
     *
     * @param v 32-bit integer
     * @return Number of bits set in this integer (0-32)
     */
    static inline uint32_t countBits(uint32_t v)
    {
        v = v - ((v >> 1) & (uint32_t)0x55555555);
        v = (v & (uint32_t)0x33333333) + ((v >> 2) & (uint32_t)0x33333333);
        return ((((v + (v >> 4)) & (uint32_t)0xF0F0F0F) * (uint32_t)0x1010101) >> 24);
    }
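
    // Illustrative examples (hypothetical caller, not part of this header):
    //
    //   Utils::countBits((uint32_t)0xF0U);           // == 4
    //   Utils::countBits((uint64_t)0xFFFFFFFFULL);   // == 32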

    /**
     * Count the number of bits set in an integer
     *
     * @param v 64-bit integer
     * @return Number of bits set in this integer (0-64)
     */
    static inline uint64_t countBits(uint64_t v)
    {
        v = v - ((v >> 1) & (uint64_t)~(uint64_t)0 / 3);
        v = (v & (uint64_t)~(uint64_t)0 / 15 * 3) + ((v >> 2) & (uint64_t)~(uint64_t)0 / 15 * 3);
        v = (v + (v >> 4)) & (uint64_t)~(uint64_t)0 / 255 * 15;
        return (uint64_t)(v * ((uint64_t)~(uint64_t)0 / 255)) >> 56;
    }

    /**
     * Check if a memory buffer is all-zero
     *
     * @param p Memory to scan
     * @param len Length of memory
     * @return True if memory is all zero
     */
    static inline bool isZero(const void* p, unsigned int len)
    {
        for (unsigned int i = 0; i < len; ++i) {
            if (((const unsigned char*)p)[i]) {
                return false;
            }
        }
        return true;
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint64_t swapBytes(const uint64_t n) noexcept
    {
#ifdef __GNUC__
        return __builtin_bswap64(n);
#else
#ifdef _MSC_VER
        return (uint64_t)_byteswap_uint64((unsigned __int64)n);
#else
        return (
            ((n & 0x00000000000000ffULL) << 56) | ((n & 0x000000000000ff00ULL) << 40) | ((n & 0x0000000000ff0000ULL) << 24) | ((n & 0x00000000ff000000ULL) << 8) | ((n & 0x000000ff00000000ULL) >> 8) | ((n & 0x0000ff0000000000ULL) >> 24)
            | ((n & 0x00ff000000000000ULL) >> 40) | ((n & 0xff00000000000000ULL) >> 56));
#endif
#endif
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint32_t swapBytes(const uint32_t n) noexcept
    {
#if defined(__GNUC__)
        return __builtin_bswap32(n);
#else
#ifdef _MSC_VER
        return (uint32_t)_byteswap_ulong((unsigned long)n);
#else
        return htonl(n);
#endif
#endif
    }

    /**
     * Unconditionally swap bytes regardless of host byte order
     *
     * @param n Integer to swap
     * @return Integer with bytes reversed
     */
    static ZT_INLINE uint16_t swapBytes(const uint16_t n) noexcept
    {
#if defined(__GNUC__)
        return __builtin_bswap16(n);
#else
#ifdef _MSC_VER
        return (uint16_t)_byteswap_ushort((unsigned short)n);
#else
        return htons(n);
#endif
#endif
    }

    // These are helper adapters to load and swap integer types special cased by size
    // to work with all typedef'd variants, signed/unsigned, etc.

    template <typename I, unsigned int S> class _swap_bytes_bysize;

    template <typename I> class _swap_bytes_bysize<I, 1> {
      public:
        static ZT_INLINE I s(const I n) noexcept
        {
            return n;
        }
    };

    template <typename I> class _swap_bytes_bysize<I, 2> {
      public:
        static ZT_INLINE I s(const I n) noexcept
        {
            return (I)swapBytes((uint16_t)n);
        }
    };

    template <typename I> class _swap_bytes_bysize<I, 4> {
      public:
        static ZT_INLINE I s(const I n) noexcept
        {
            return (I)swapBytes((uint32_t)n);
        }
    };

    template <typename I> class _swap_bytes_bysize<I, 8> {
      public:
        static ZT_INLINE I s(const I n) noexcept
        {
            return (I)swapBytes((uint64_t)n);
        }
    };

    template <typename I, unsigned int S> class _load_be_bysize;

    template <typename I> class _load_be_bysize<I, 1> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return p[0];
        }
    };

    template <typename I> class _load_be_bysize<I, 2> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)(((unsigned int)p[0] << 8U) | (unsigned int)p[1]);
        }
    };

    template <typename I> class _load_be_bysize<I, 4> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)(((uint32_t)p[0] << 24U) | ((uint32_t)p[1] << 16U) | ((uint32_t)p[2] << 8U) | (uint32_t)p[3]);
        }
    };

    template <typename I> class _load_be_bysize<I, 8> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)(((uint64_t)p[0] << 56U) | ((uint64_t)p[1] << 48U) | ((uint64_t)p[2] << 40U) | ((uint64_t)p[3] << 32U) | ((uint64_t)p[4] << 24U) | ((uint64_t)p[5] << 16U) | ((uint64_t)p[6] << 8U) | (uint64_t)p[7]);
        }
    };

    template <typename I, unsigned int S> class _load_le_bysize;

    template <typename I> class _load_le_bysize<I, 1> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return p[0];
        }
    };

    template <typename I> class _load_le_bysize<I, 2> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)((unsigned int)p[0] | ((unsigned int)p[1] << 8U));
        }
    };

    template <typename I> class _load_le_bysize<I, 4> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)((uint32_t)p[0] | ((uint32_t)p[1] << 8U) | ((uint32_t)p[2] << 16U) | ((uint32_t)p[3] << 24U));
        }
    };

    template <typename I> class _load_le_bysize<I, 8> {
      public:
        static ZT_INLINE I l(const uint8_t* const p) noexcept
        {
            return (I)((uint64_t)p[0] | ((uint64_t)p[1] << 8U) | ((uint64_t)p[2] << 16U) | ((uint64_t)p[3] << 24U) | ((uint64_t)p[4] << 32U) | ((uint64_t)p[5] << 40U) | ((uint64_t)p[6] << 48U) | ((uint64_t)p[7]) << 56U);
        }
    };

    /**
     * Convert any signed or unsigned integer type to big-endian ("network") byte order
     *
     * @tparam I Integer type (usually inferred)
     * @param n Value to convert
     * @return Value in big-endian order
     */
    template <typename I> static ZT_INLINE I hton(const I n) noexcept
    {
#if __BYTE_ORDER == __LITTLE_ENDIAN
        return _swap_bytes_bysize<I, sizeof(I)>::s(n);
#else
        return n;
#endif
    }

    /**
     * Convert any signed or unsigned integer type to host byte order from big-endian ("network") byte order
     *
     * @tparam I Integer type (usually inferred)
     * @param n Value to convert
     * @return Value in host byte order
     */
    template <typename I> static ZT_INLINE I ntoh(const I n) noexcept
    {
#if __BYTE_ORDER == __LITTLE_ENDIAN
        return _swap_bytes_bysize<I, sizeof(I)>::s(n);
#else
        return n;
#endif
    }
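
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   uint16_t port = 9993;
    //   uint16_t wire = Utils::hton(port);   // big-endian ("network") order
    //   uint16_t host = Utils::ntoh(wire);   // back to host order; host == port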

    /**
     * Copy bits from memory into an integer type without modifying their order
     *
     * @tparam I Type to load
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Loaded raw integer
     */
    template <typename I> static ZT_INLINE I loadMachineEndian(const void* const p) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        I tmp;
        for (int i = 0; i < (int)sizeof(I); ++i) {
            reinterpret_cast<uint8_t*>(&tmp)[i] = reinterpret_cast<const uint8_t*>(p)[i];
        }
        return tmp;
#else
        return *reinterpret_cast<const I*>(p);
#endif
    }

    /**
     * Copy bits from an integer type into memory without modifying their order
     *
     * @tparam I Type to store
     * @param p Byte array (must be at least sizeof(I))
     * @param i Integer to store
     */
    template <typename I> static ZT_INLINE void storeMachineEndian(void* const p, const I i) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        for (unsigned int k = 0; k < sizeof(I); ++k) {
            reinterpret_cast<uint8_t*>(p)[k] = reinterpret_cast<const uint8_t*>(&i)[k];
        }
#else
        *reinterpret_cast<I*>(p) = i;
#endif
    }

    /**
     * Decode a big-endian value from a byte stream
     *
     * @tparam I Type to decode (should be unsigned e.g. uint32_t or uint64_t)
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Decoded integer
     */
    template <typename I> static ZT_INLINE I loadBigEndian(const void* const p) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        return _load_be_bysize<I, sizeof(I)>::l(reinterpret_cast<const uint8_t*>(p));
#else
        return ntoh(*reinterpret_cast<const I*>(p));
#endif
    }

    /**
     * Save an integer in big-endian format
     *
     * @tparam I Integer type to store (usually inferred)
     * @param p Byte stream to write (must be at least sizeof(I))
     * @param i Integer to write
     */
    template <typename I> static ZT_INLINE void storeBigEndian(void* const p, I i) noexcept
    {
#ifdef ZT_NO_UNALIGNED_ACCESS
        storeMachineEndian(p, hton(i));
#else
        *reinterpret_cast<I*>(p) = hton(i);
#endif
    }
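
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   uint8_t buf[8];
    //   Utils::storeBigEndian<uint64_t>(buf, 0x0102030405060708ULL);
    //   // buf == { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08 } on any host
    //   uint64_t v = Utils::loadBigEndian<uint64_t>(buf);   // v == 0x0102030405060708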

    /**
     * Decode a little-endian value from a byte stream
     *
     * @tparam I Type to decode
     * @param p Byte stream, must be at least sizeof(I) in size
     * @return Decoded integer
     */
    template <typename I> static ZT_INLINE I loadLittleEndian(const void* const p) noexcept
    {
#if __BYTE_ORDER == __BIG_ENDIAN || defined(ZT_NO_UNALIGNED_ACCESS)
        return _load_le_bysize<I, sizeof(I)>::l(reinterpret_cast<const uint8_t*>(p));
#else
        return *reinterpret_cast<const I*>(p);
#endif
    }

    /**
     * Save an integer in little-endian format
     *
     * @tparam I Integer type to store (usually inferred)
     * @param p Byte stream to write (must be at least sizeof(I))
     * @param i Integer to write
     */
    template <typename I> static ZT_INLINE void storeLittleEndian(void* const p, const I i) noexcept
    {
#if __BYTE_ORDER == __BIG_ENDIAN
        storeMachineEndian(p, _swap_bytes_bysize<I, sizeof(I)>::s(i));
#else
#ifdef ZT_NO_UNALIGNED_ACCESS
        storeMachineEndian(p, i);
#else
        *reinterpret_cast<I*>(p) = i;
#endif
#endif
    }

    /**
     * Copy memory block whose size is known at compile time
     *
     * @tparam L Size of memory
     * @param dest Destination memory
     * @param src Source memory
     */
    template <unsigned long L> static ZT_INLINE void copy(void* dest, const void* src) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        uintptr_t l = L;
        __asm__ __volatile__("cld ; rep movsb" : "+c"(l), "+S"(src), "+D"(dest)::"memory");
#else
        memcpy(dest, src, L);
#endif
    }
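
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   uint8_t src[16], dst[16];
    //   Utils::copy<16>(dst, src);            // size known at compile time
    //   Utils::copy(dst, src, sizeof(src));   // size known only at run time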

    /**
     * Copy memory block whose size is known at run time
     *
     * @param dest Destination memory
     * @param src Source memory
     * @param len Bytes to copy
     */
    static ZT_INLINE void copy(void* dest, const void* src, unsigned long len) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        __asm__ __volatile__("cld ; rep movsb" : "+c"(len), "+S"(src), "+D"(dest)::"memory");
#else
        memcpy(dest, src, len);
#endif
    }

    /**
     * Zero memory block whose size is known at compile time
     *
     * @tparam L Size in bytes
     * @param dest Memory to zero
     */
    template <unsigned long L> static ZT_INLINE void zero(void* dest) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        uintptr_t l = L;
        __asm__ __volatile__("cld ; rep stosb" : "+c"(l), "+D"(dest) : "a"(0) : "memory");
#else
        memset(dest, 0, L);
#endif
    }

    /**
     * Zero memory block whose size is known at run time
     *
     * @param dest Memory to zero
     * @param len Size in bytes
     */
    static ZT_INLINE void zero(void* dest, unsigned long len) noexcept
    {
#if defined(ZT_ARCH_X64) && defined(__GNUC__)
        __asm__ __volatile__("cld ; rep stosb" : "+c"(len), "+D"(dest) : "a"(0) : "memory");
#else
        memset(dest, 0, len);
#endif
    }

    /**
     * Hexadecimal characters 0-f
     */
    static const char HEXCHARS[16];

    /**
     * Remove `-` and `:` from a MAC address (in-place).
     *
     * @param mac The MAC address
     */
    static inline void cleanMac(std::string& mac)
    {
        auto start = mac.begin();
        auto end = mac.end();
        auto new_end = std::remove_if(start, end, [](char c) { return c == 45 || c == 58; });
        mac.erase(new_end, end);
    }
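
    // Illustrative usage (hypothetical caller, not part of this header):
    //
    //   std::string mac("ab:cd:ef:01:02:03");
    //   Utils::cleanMac(mac);   // mac == "abcdef010203"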
};

}   // namespace ZeroTier

#endif