/*
 * Based on public domain code available at: http://cr.yp.to/snuffle.html
 *
 * Modifications and C-native SSE macro based SSE implementation by
 * Adam Ierymenko <[email protected]>.
 *
 * Since the original was public domain, this is too.
 */

#include "Salsa20.hpp"
#include "Constants.hpp"

#define ROTATE(v, c) (((v) << (c)) | ((v) >> (32 - (c))))
#define XOR(v, w) ((v) ^ (w))
#define PLUS(v, w) ((uint32_t)((v) + (w)))
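// These three macros are the Salsa20 quarter-round primitives used by the portable
// (non-SSE) code below: 32-bit left rotation, XOR, and addition modulo 2^32.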

// Set up load/store macros with appropriate endianness (we don't use these in SSE mode)
#ifndef ZT_SALSA20_SSE

#if __BYTE_ORDER == __LITTLE_ENDIAN

#ifdef ZT_NO_TYPE_PUNNING
// Slower version that does not use type punning
#define U8TO32_LITTLE(p) (((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24))
static inline void U32TO8_LITTLE(uint8_t* const c, const uint32_t v)
{
    c[0] = (uint8_t)v;
    c[1] = (uint8_t)(v >> 8);
    c[2] = (uint8_t)(v >> 16);
    c[3] = (uint8_t)(v >> 24);
}
#else
// Fast version that just does 32-bit load/store
#define U8TO32_LITTLE(p) (*((const uint32_t*)((const void*)(p))))
#define U32TO8_LITTLE(c, v) *((uint32_t*)((void*)(c))) = (v)
#endif // ZT_NO_TYPE_PUNNING

#else // __BYTE_ORDER == __BIG_ENDIAN (we don't support anything else... does MIDDLE_ENDIAN even still exist?)

#ifdef __GNUC__
// Use GNUC builtin bswap macros on big-endian machines if available
#define U8TO32_LITTLE(p) __builtin_bswap32(*((const uint32_t*)((const void*)(p))))
#define U32TO8_LITTLE(c, v) *((uint32_t*)((void*)(c))) = __builtin_bswap32((v))
#else // no __GNUC__
// Otherwise do it the slow, manual way on BE machines
#define U8TO32_LITTLE(p) (((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24))
static inline void U32TO8_LITTLE(uint8_t* const c, const uint32_t v)
{
    c[0] = (uint8_t)v;
    c[1] = (uint8_t)(v >> 8);
    c[2] = (uint8_t)(v >> 16);
    c[3] = (uint8_t)(v >> 24);
}
#endif // __GNUC__ or not

#endif // __BYTE_ORDER little or big?

#endif // !ZT_SALSA20_SSE
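// In SSE mode the 16-word state is loaded and stored as __m128i vectors and the keystream
// is applied with vector XORs, so the byte-order helpers above are not needed; that path
// effectively assumes a little-endian x86/x64 target.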

// Statically compute and define SSE constants
#ifdef ZT_SALSA20_SSE
class _s20sseconsts {
  public:
    _s20sseconsts()
    {
        maskLo32 = _mm_shuffle_epi32(_mm_cvtsi32_si128(-1), _MM_SHUFFLE(1, 0, 1, 0));
        maskHi32 = _mm_slli_epi64(maskLo32, 32);
    }
    __m128i maskLo32, maskHi32;
};
static const _s20sseconsts _S20SSECONSTANTS;
#endif
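// maskLo32 selects the low 32 bits and maskHi32 the high 32 bits of each 64-bit lane;
// they are used below to re-interleave the keystream vectors into standard output order.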

namespace ZeroTier {

void Salsa20::init(const void* key, const void* iv)
{
#ifdef ZT_SALSA20_SSE
    const uint32_t* const k = (const uint32_t*)key;
    _state.i[0] = 0x61707865;
    _state.i[1] = 0x3320646e;
    _state.i[2] = 0x79622d32;
    _state.i[3] = 0x6b206574;
    _state.i[4] = k[3];
    _state.i[5] = 0;
    _state.i[6] = k[7];
    _state.i[7] = k[2];
    _state.i[8] = 0;
    _state.i[9] = k[6];
    _state.i[10] = k[1];
    _state.i[11] = ((const uint32_t*)iv)[1];
    _state.i[12] = k[5];
    _state.i[13] = k[0];
    _state.i[14] = ((const uint32_t*)iv)[0];
    _state.i[15] = k[4];
#else
    const char* const constants = "expand 32-byte k";
    const uint8_t* const k = (const uint8_t*)key;
    _state.i[0] = U8TO32_LITTLE(constants + 0);
    _state.i[1] = U8TO32_LITTLE(k + 0);
    _state.i[2] = U8TO32_LITTLE(k + 4);
    _state.i[3] = U8TO32_LITTLE(k + 8);
    _state.i[4] = U8TO32_LITTLE(k + 12);
    _state.i[5] = U8TO32_LITTLE(constants + 4);
    _state.i[6] = U8TO32_LITTLE(((const uint8_t*)iv) + 0);
    _state.i[7] = U8TO32_LITTLE(((const uint8_t*)iv) + 4);
    _state.i[8] = 0;
    _state.i[9] = 0;
    _state.i[10] = U8TO32_LITTLE(constants + 8);
    _state.i[11] = U8TO32_LITTLE(k + 16);
    _state.i[12] = U8TO32_LITTLE(k + 20);
    _state.i[13] = U8TO32_LITTLE(k + 24);
    _state.i[14] = U8TO32_LITTLE(k + 28);
    _state.i[15] = U8TO32_LITTLE(constants + 12);
#endif
}
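
// crypt12 and crypt20 below are identical except for the number of double rounds
// (6 for Salsa20/12, 10 for Salsa20/20); both XOR the generated keystream with the
// input one 64-byte block at a time.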
void Salsa20::crypt12(const void* in, void* out, unsigned int bytes)
{
    uint8_t tmp[64];
    const uint8_t* m = (const uint8_t*)in;
    uint8_t* c = (uint8_t*)out;
    uint8_t* ctarget = c;
    unsigned int i;
#ifndef ZT_SALSA20_SSE
    uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
    uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
#endif
    if (! bytes) {
        return;
    }
#ifndef ZT_SALSA20_SSE
    j0 = _state.i[0];
    j1 = _state.i[1];
    j2 = _state.i[2];
    j3 = _state.i[3];
    j4 = _state.i[4];
    j5 = _state.i[5];
    j6 = _state.i[6];
    j7 = _state.i[7];
    j8 = _state.i[8];
    j9 = _state.i[9];
    j10 = _state.i[10];
    j11 = _state.i[11];
    j12 = _state.i[12];
    j13 = _state.i[13];
    j14 = _state.i[14];
    j15 = _state.i[15];
#endif
    for (;;) {
        if (bytes < 64) {
            for (i = 0; i < bytes; ++i) {
                tmp[i] = m[i];
            }
            m = tmp;
            ctarget = c;
            c = tmp;
        }
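        // Note: a trailing block of fewer than 64 bytes is staged through tmp; the
        // encrypted bytes are copied back out to ctarget before returning below.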
#ifdef ZT_SALSA20_SSE
        __m128i X0 = _mm_loadu_si128((const __m128i*)&(_state.v[0]));
        __m128i X1 = _mm_loadu_si128((const __m128i*)&(_state.v[1]));
        __m128i X2 = _mm_loadu_si128((const __m128i*)&(_state.v[2]));
        __m128i X3 = _mm_loadu_si128((const __m128i*)&(_state.v[3]));
        __m128i T;
        __m128i X0s = X0;
        __m128i X1s = X1;
        __m128i X2s = X2;
        __m128i X3s = X3;
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        // 2X round -------------------------------------------------------------
        T = _mm_add_epi32(X0, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X1, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X3, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x93);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x39);
        T = _mm_add_epi32(X0, X1);
        X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
        T = _mm_add_epi32(X3, X0);
        X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
        T = _mm_add_epi32(X2, X3);
        X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
        T = _mm_add_epi32(X1, X2);
        X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
        X1 = _mm_shuffle_epi32(X1, 0x39);
        X2 = _mm_shuffle_epi32(X2, 0x4E);
        X3 = _mm_shuffle_epi32(X3, 0x93);
        X0 = _mm_add_epi32(X0s, X0);
        X1 = _mm_add_epi32(X1s, X1);
        X2 = _mm_add_epi32(X2s, X2);
        X3 = _mm_add_epi32(X3s, X3);
        __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
        __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
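        // k02/k13/k20/k31 reassemble the shuffled SSE state words into standard Salsa20
        // output order before the keystream is XORed with the message block below.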
        _mm_storeu_ps(reinterpret_cast<float*>(c), _mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02, k20), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m))))));
        _mm_storeu_ps(reinterpret_cast<float*>(c) + 4, _mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13, k31), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 4)))));
        _mm_storeu_ps(reinterpret_cast<float*>(c) + 8, _mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20, k02), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 8)))));
        _mm_storeu_ps(reinterpret_cast<float*>(c) + 12, _mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31, k13), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 12)))));
        if (! (++_state.i[8])) {
            ++_state.i[5]; // state reordered for SSE
            /* stopping at 2^70 bytes per nonce is user's responsibility */
        }
#else
        x0 = j0;
        x1 = j1;
        x2 = j2;
        x3 = j3;
        x4 = j4;
        x5 = j5;
        x6 = j6;
        x7 = j7;
        x8 = j8;
        x9 = j9;
        x10 = j10;
        x11 = j11;
        x12 = j12;
        x13 = j13;
        x14 = j14;
        x15 = j15;
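        // Each "2X round" block below is one Salsa20 double round: a column round followed
        // by a row round; crypt12 runs six of them for a total of 12 rounds.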
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        // 2X round -------------------------------------------------------------
        x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
        x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
        x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
        x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
        x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
        x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
        x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
        x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
        x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
        x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
        x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
        x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
        x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
        x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
        x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
        x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
        x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
        x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
        x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
        x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
        x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
        x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
        x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
        x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
        x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
        x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
        x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
        x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
        x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
        x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
        x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
        x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
        x0 = PLUS(x0, j0);
        x1 = PLUS(x1, j1);
        x2 = PLUS(x2, j2);
        x3 = PLUS(x3, j3);
        x4 = PLUS(x4, j4);
        x5 = PLUS(x5, j5);
        x6 = PLUS(x6, j6);
        x7 = PLUS(x7, j7);
        x8 = PLUS(x8, j8);
        x9 = PLUS(x9, j9);
        x10 = PLUS(x10, j10);
        x11 = PLUS(x11, j11);
        x12 = PLUS(x12, j12);
        x13 = PLUS(x13, j13);
        x14 = PLUS(x14, j14);
        x15 = PLUS(x15, j15);
        U32TO8_LITTLE(c + 0, XOR(x0, U8TO32_LITTLE(m + 0)));
        U32TO8_LITTLE(c + 4, XOR(x1, U8TO32_LITTLE(m + 4)));
        U32TO8_LITTLE(c + 8, XOR(x2, U8TO32_LITTLE(m + 8)));
        U32TO8_LITTLE(c + 12, XOR(x3, U8TO32_LITTLE(m + 12)));
        U32TO8_LITTLE(c + 16, XOR(x4, U8TO32_LITTLE(m + 16)));
        U32TO8_LITTLE(c + 20, XOR(x5, U8TO32_LITTLE(m + 20)));
        U32TO8_LITTLE(c + 24, XOR(x6, U8TO32_LITTLE(m + 24)));
        U32TO8_LITTLE(c + 28, XOR(x7, U8TO32_LITTLE(m + 28)));
        U32TO8_LITTLE(c + 32, XOR(x8, U8TO32_LITTLE(m + 32)));
        U32TO8_LITTLE(c + 36, XOR(x9, U8TO32_LITTLE(m + 36)));
        U32TO8_LITTLE(c + 40, XOR(x10, U8TO32_LITTLE(m + 40)));
        U32TO8_LITTLE(c + 44, XOR(x11, U8TO32_LITTLE(m + 44)));
        U32TO8_LITTLE(c + 48, XOR(x12, U8TO32_LITTLE(m + 48)));
        U32TO8_LITTLE(c + 52, XOR(x13, U8TO32_LITTLE(m + 52)));
        U32TO8_LITTLE(c + 56, XOR(x14, U8TO32_LITTLE(m + 56)));
        U32TO8_LITTLE(c + 60, XOR(x15, U8TO32_LITTLE(m + 60)));
        if (! (++j8)) {
            ++j9;
            /* stopping at 2^70 bytes per nonce is user's responsibility */
        }
#endif
        if (bytes <= 64) {
            if (bytes < 64) {
                for (i = 0; i < bytes; ++i) {
                    ctarget[i] = c[i];
                }
            }
#ifndef ZT_SALSA20_SSE
            _state.i[8] = j8;
            _state.i[9] = j9;
#endif
            return;
        }
        bytes -= 64;
        c += 64;
        m += 64;
    }
}
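
// crypt20 is the full 20-round variant: the structure matches crypt12 above, with ten
// double rounds per 64-byte block instead of six.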
  579. void Salsa20::crypt20(const void* in, void* out, unsigned int bytes)
  580. {
  581. uint8_t tmp[64];
  582. const uint8_t* m = (const uint8_t*)in;
  583. uint8_t* c = (uint8_t*)out;
  584. uint8_t* ctarget = c;
  585. unsigned int i;
  586. #ifndef ZT_SALSA20_SSE
  587. uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
  588. uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
  589. #endif
  590. if (! bytes) {
  591. return;
  592. }
  593. #ifndef ZT_SALSA20_SSE
  594. j0 = _state.i[0];
  595. j1 = _state.i[1];
  596. j2 = _state.i[2];
  597. j3 = _state.i[3];
  598. j4 = _state.i[4];
  599. j5 = _state.i[5];
  600. j6 = _state.i[6];
  601. j7 = _state.i[7];
  602. j8 = _state.i[8];
  603. j9 = _state.i[9];
  604. j10 = _state.i[10];
  605. j11 = _state.i[11];
  606. j12 = _state.i[12];
  607. j13 = _state.i[13];
  608. j14 = _state.i[14];
  609. j15 = _state.i[15];
  610. #endif
  611. for (;;) {
  612. if (bytes < 64) {
  613. for (i = 0; i < bytes; ++i) {
  614. tmp[i] = m[i];
  615. }
  616. m = tmp;
  617. ctarget = c;
  618. c = tmp;
  619. }
  620. #ifdef ZT_SALSA20_SSE
  621. __m128i X0 = _mm_loadu_si128((const __m128i*)&(_state.v[0]));
  622. __m128i X1 = _mm_loadu_si128((const __m128i*)&(_state.v[1]));
  623. __m128i X2 = _mm_loadu_si128((const __m128i*)&(_state.v[2]));
  624. __m128i X3 = _mm_loadu_si128((const __m128i*)&(_state.v[3]));
  625. __m128i T;
  626. __m128i X0s = X0;
  627. __m128i X1s = X1;
  628. __m128i X2s = X2;
  629. __m128i X3s = X3;
  630. // 2X round -------------------------------------------------------------
  631. T = _mm_add_epi32(X0, X3);
  632. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  633. T = _mm_add_epi32(X1, X0);
  634. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  635. T = _mm_add_epi32(X2, X1);
  636. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  637. T = _mm_add_epi32(X3, X2);
  638. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  639. X1 = _mm_shuffle_epi32(X1, 0x93);
  640. X2 = _mm_shuffle_epi32(X2, 0x4E);
  641. X3 = _mm_shuffle_epi32(X3, 0x39);
  642. T = _mm_add_epi32(X0, X1);
  643. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  644. T = _mm_add_epi32(X3, X0);
  645. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  646. T = _mm_add_epi32(X2, X3);
  647. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  648. T = _mm_add_epi32(X1, X2);
  649. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  650. X1 = _mm_shuffle_epi32(X1, 0x39);
  651. X2 = _mm_shuffle_epi32(X2, 0x4E);
  652. X3 = _mm_shuffle_epi32(X3, 0x93);
  653. // 2X round -------------------------------------------------------------
  654. T = _mm_add_epi32(X0, X3);
  655. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  656. T = _mm_add_epi32(X1, X0);
  657. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  658. T = _mm_add_epi32(X2, X1);
  659. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  660. T = _mm_add_epi32(X3, X2);
  661. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  662. X1 = _mm_shuffle_epi32(X1, 0x93);
  663. X2 = _mm_shuffle_epi32(X2, 0x4E);
  664. X3 = _mm_shuffle_epi32(X3, 0x39);
  665. T = _mm_add_epi32(X0, X1);
  666. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  667. T = _mm_add_epi32(X3, X0);
  668. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  669. T = _mm_add_epi32(X2, X3);
  670. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  671. T = _mm_add_epi32(X1, X2);
  672. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  673. X1 = _mm_shuffle_epi32(X1, 0x39);
  674. X2 = _mm_shuffle_epi32(X2, 0x4E);
  675. X3 = _mm_shuffle_epi32(X3, 0x93);
  676. // 2X round -------------------------------------------------------------
  677. T = _mm_add_epi32(X0, X3);
  678. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  679. T = _mm_add_epi32(X1, X0);
  680. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  681. T = _mm_add_epi32(X2, X1);
  682. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  683. T = _mm_add_epi32(X3, X2);
  684. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  685. X1 = _mm_shuffle_epi32(X1, 0x93);
  686. X2 = _mm_shuffle_epi32(X2, 0x4E);
  687. X3 = _mm_shuffle_epi32(X3, 0x39);
  688. T = _mm_add_epi32(X0, X1);
  689. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  690. T = _mm_add_epi32(X3, X0);
  691. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  692. T = _mm_add_epi32(X2, X3);
  693. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  694. T = _mm_add_epi32(X1, X2);
  695. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  696. X1 = _mm_shuffle_epi32(X1, 0x39);
  697. X2 = _mm_shuffle_epi32(X2, 0x4E);
  698. X3 = _mm_shuffle_epi32(X3, 0x93);
  699. // 2X round -------------------------------------------------------------
  700. T = _mm_add_epi32(X0, X3);
  701. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  702. T = _mm_add_epi32(X1, X0);
  703. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  704. T = _mm_add_epi32(X2, X1);
  705. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  706. T = _mm_add_epi32(X3, X2);
  707. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  708. X1 = _mm_shuffle_epi32(X1, 0x93);
  709. X2 = _mm_shuffle_epi32(X2, 0x4E);
  710. X3 = _mm_shuffle_epi32(X3, 0x39);
  711. T = _mm_add_epi32(X0, X1);
  712. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  713. T = _mm_add_epi32(X3, X0);
  714. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  715. T = _mm_add_epi32(X2, X3);
  716. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  717. T = _mm_add_epi32(X1, X2);
  718. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  719. X1 = _mm_shuffle_epi32(X1, 0x39);
  720. X2 = _mm_shuffle_epi32(X2, 0x4E);
  721. X3 = _mm_shuffle_epi32(X3, 0x93);
  722. // 2X round -------------------------------------------------------------
  723. T = _mm_add_epi32(X0, X3);
  724. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  725. T = _mm_add_epi32(X1, X0);
  726. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  727. T = _mm_add_epi32(X2, X1);
  728. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  729. T = _mm_add_epi32(X3, X2);
  730. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  731. X1 = _mm_shuffle_epi32(X1, 0x93);
  732. X2 = _mm_shuffle_epi32(X2, 0x4E);
  733. X3 = _mm_shuffle_epi32(X3, 0x39);
  734. T = _mm_add_epi32(X0, X1);
  735. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  736. T = _mm_add_epi32(X3, X0);
  737. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  738. T = _mm_add_epi32(X2, X3);
  739. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  740. T = _mm_add_epi32(X1, X2);
  741. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  742. X1 = _mm_shuffle_epi32(X1, 0x39);
  743. X2 = _mm_shuffle_epi32(X2, 0x4E);
  744. X3 = _mm_shuffle_epi32(X3, 0x93);
  745. // 2X round -------------------------------------------------------------
  746. T = _mm_add_epi32(X0, X3);
  747. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  748. T = _mm_add_epi32(X1, X0);
  749. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  750. T = _mm_add_epi32(X2, X1);
  751. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  752. T = _mm_add_epi32(X3, X2);
  753. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  754. X1 = _mm_shuffle_epi32(X1, 0x93);
  755. X2 = _mm_shuffle_epi32(X2, 0x4E);
  756. X3 = _mm_shuffle_epi32(X3, 0x39);
  757. T = _mm_add_epi32(X0, X1);
  758. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  759. T = _mm_add_epi32(X3, X0);
  760. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  761. T = _mm_add_epi32(X2, X3);
  762. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  763. T = _mm_add_epi32(X1, X2);
  764. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  765. X1 = _mm_shuffle_epi32(X1, 0x39);
  766. X2 = _mm_shuffle_epi32(X2, 0x4E);
  767. X3 = _mm_shuffle_epi32(X3, 0x93);
  768. // 2X round -------------------------------------------------------------
  769. T = _mm_add_epi32(X0, X3);
  770. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  771. T = _mm_add_epi32(X1, X0);
  772. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  773. T = _mm_add_epi32(X2, X1);
  774. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  775. T = _mm_add_epi32(X3, X2);
  776. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  777. X1 = _mm_shuffle_epi32(X1, 0x93);
  778. X2 = _mm_shuffle_epi32(X2, 0x4E);
  779. X3 = _mm_shuffle_epi32(X3, 0x39);
  780. T = _mm_add_epi32(X0, X1);
  781. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  782. T = _mm_add_epi32(X3, X0);
  783. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  784. T = _mm_add_epi32(X2, X3);
  785. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  786. T = _mm_add_epi32(X1, X2);
  787. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  788. X1 = _mm_shuffle_epi32(X1, 0x39);
  789. X2 = _mm_shuffle_epi32(X2, 0x4E);
  790. X3 = _mm_shuffle_epi32(X3, 0x93);
  791. // 2X round -------------------------------------------------------------
  792. T = _mm_add_epi32(X0, X3);
  793. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  794. T = _mm_add_epi32(X1, X0);
  795. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  796. T = _mm_add_epi32(X2, X1);
  797. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  798. T = _mm_add_epi32(X3, X2);
  799. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  800. X1 = _mm_shuffle_epi32(X1, 0x93);
  801. X2 = _mm_shuffle_epi32(X2, 0x4E);
  802. X3 = _mm_shuffle_epi32(X3, 0x39);
  803. T = _mm_add_epi32(X0, X1);
  804. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  805. T = _mm_add_epi32(X3, X0);
  806. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  807. T = _mm_add_epi32(X2, X3);
  808. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  809. T = _mm_add_epi32(X1, X2);
  810. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  811. X1 = _mm_shuffle_epi32(X1, 0x39);
  812. X2 = _mm_shuffle_epi32(X2, 0x4E);
  813. X3 = _mm_shuffle_epi32(X3, 0x93);
  814. // 2X round -------------------------------------------------------------
  815. T = _mm_add_epi32(X0, X3);
  816. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  817. T = _mm_add_epi32(X1, X0);
  818. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  819. T = _mm_add_epi32(X2, X1);
  820. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  821. T = _mm_add_epi32(X3, X2);
  822. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  823. X1 = _mm_shuffle_epi32(X1, 0x93);
  824. X2 = _mm_shuffle_epi32(X2, 0x4E);
  825. X3 = _mm_shuffle_epi32(X3, 0x39);
  826. T = _mm_add_epi32(X0, X1);
  827. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  828. T = _mm_add_epi32(X3, X0);
  829. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  830. T = _mm_add_epi32(X2, X3);
  831. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  832. T = _mm_add_epi32(X1, X2);
  833. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  834. X1 = _mm_shuffle_epi32(X1, 0x39);
  835. X2 = _mm_shuffle_epi32(X2, 0x4E);
  836. X3 = _mm_shuffle_epi32(X3, 0x93);
  837. // 2X round -------------------------------------------------------------
  838. T = _mm_add_epi32(X0, X3);
  839. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  840. T = _mm_add_epi32(X1, X0);
  841. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  842. T = _mm_add_epi32(X2, X1);
  843. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  844. T = _mm_add_epi32(X3, X2);
  845. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  846. X1 = _mm_shuffle_epi32(X1, 0x93);
  847. X2 = _mm_shuffle_epi32(X2, 0x4E);
  848. X3 = _mm_shuffle_epi32(X3, 0x39);
  849. T = _mm_add_epi32(X0, X1);
  850. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  851. T = _mm_add_epi32(X3, X0);
  852. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  853. T = _mm_add_epi32(X2, X3);
  854. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  855. T = _mm_add_epi32(X1, X2);
  856. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  857. X1 = _mm_shuffle_epi32(X1, 0x39);
  858. X2 = _mm_shuffle_epi32(X2, 0x4E);
  859. X3 = _mm_shuffle_epi32(X3, 0x93);
  860. X0 = _mm_add_epi32(X0s, X0);
  861. X1 = _mm_add_epi32(X1s, X1);
  862. X2 = _mm_add_epi32(X2s, X2);
  863. X3 = _mm_add_epi32(X3s, X3);
  864. __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  865. __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  866. __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
  867. __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
  868. _mm_storeu_ps(reinterpret_cast<float*>(c), _mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02, k20), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m))))));
  869. _mm_storeu_ps(reinterpret_cast<float*>(c) + 4, _mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13, k31), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 4)))));
  870. _mm_storeu_ps(reinterpret_cast<float*>(c) + 8, _mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20, k02), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 8)))));
  871. _mm_storeu_ps(reinterpret_cast<float*>(c) + 12, _mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31, k13), _mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float*>(m) + 12)))));
  872. if (! (++_state.i[8])) {
  873. ++_state.i[5]; // state reordered for SSE
  874. /* stopping at 2^70 bytes per nonce is user's responsibility */
  875. }
  876. #else
  877. x0 = j0;
  878. x1 = j1;
  879. x2 = j2;
  880. x3 = j3;
  881. x4 = j4;
  882. x5 = j5;
  883. x6 = j6;
  884. x7 = j7;
  885. x8 = j8;
  886. x9 = j9;
  887. x10 = j10;
  888. x11 = j11;
  889. x12 = j12;
  890. x13 = j13;
  891. x14 = j14;
  892. x15 = j15;
  893. // 2X round -------------------------------------------------------------
  894. x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
  895. x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
  896. x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
  897. x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
  898. x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
  899. x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
  900. x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
  901. x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
  902. x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
  903. x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
  904. x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
  905. x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
  906. x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
  907. x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
  908. x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
  909. x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
  910. x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
  911. x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
  912. x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
  913. x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
  914. x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
  915. x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
  916. x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
  917. x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
  918. x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
  919. x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
  920. x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
  921. x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
  922. x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
  923. x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
  924. x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
  925. x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
  926. // 2X round -------------------------------------------------------------
  927. x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
  928. x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
  929. x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
  930. x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
  931. x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
  932. x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
  933. x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
  934. x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
  935. x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
  936. x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
  937. x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
  938. x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
  939. x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
  940. x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
  941. x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
  942. x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
  943. x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
  944. x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
  945. x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
  946. x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
  947. x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
  948. x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
  949. x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
  950. x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
  951. x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
  952. x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
  953. x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
  954. x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
  955. x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
  956. x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
  957. x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
  958. x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
  959. // 2X round -------------------------------------------------------------
  960. x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
  961. x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
  962. x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
  963. x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
  964. x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
  965. x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
  966. x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
  967. x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
  968. x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
  969. x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
  970. x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
  971. x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
  972. x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
  973. x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
  974. x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
  975. x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
  976. x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
  977. x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
  978. x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
  979. x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
  980. x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
  981. x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
  982. x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
  983. x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
  984. x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
  985. x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
  986. x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
  987. x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
  988. x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
  989. x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
  990. x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
  991. x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
  992. // 2X round -------------------------------------------------------------
  993. x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
  994. x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
  995. x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
  996. x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
  997. x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
  998. x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
  999. x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
  1000. x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
  1001. x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
  1002. x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
  1003. x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
  1004. x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
  1005. x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
  1006. x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
  1007. x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
  1008. x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
  1009. x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
  1010. x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
  1011. x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
  1012. x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
  1013. x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
  1014. x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
  1015. x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
  1016. x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
  1017. x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
  1018. x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
  1019. x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
  1020. x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
  1021. x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
  1022. x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
  1023. x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
  1024. x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
		// 2X round -------------------------------------------------------------
		x4 = XOR(x4, ROTATE(PLUS(x0, x12), 7));
		x8 = XOR(x8, ROTATE(PLUS(x4, x0), 9));
		x12 = XOR(x12, ROTATE(PLUS(x8, x4), 13));
		x0 = XOR(x0, ROTATE(PLUS(x12, x8), 18));
		x9 = XOR(x9, ROTATE(PLUS(x5, x1), 7));
		x13 = XOR(x13, ROTATE(PLUS(x9, x5), 9));
		x1 = XOR(x1, ROTATE(PLUS(x13, x9), 13));
		x5 = XOR(x5, ROTATE(PLUS(x1, x13), 18));
		x14 = XOR(x14, ROTATE(PLUS(x10, x6), 7));
		x2 = XOR(x2, ROTATE(PLUS(x14, x10), 9));
		x6 = XOR(x6, ROTATE(PLUS(x2, x14), 13));
		x10 = XOR(x10, ROTATE(PLUS(x6, x2), 18));
		x3 = XOR(x3, ROTATE(PLUS(x15, x11), 7));
		x7 = XOR(x7, ROTATE(PLUS(x3, x15), 9));
		x11 = XOR(x11, ROTATE(PLUS(x7, x3), 13));
		x15 = XOR(x15, ROTATE(PLUS(x11, x7), 18));
		x1 = XOR(x1, ROTATE(PLUS(x0, x3), 7));
		x2 = XOR(x2, ROTATE(PLUS(x1, x0), 9));
		x3 = XOR(x3, ROTATE(PLUS(x2, x1), 13));
		x0 = XOR(x0, ROTATE(PLUS(x3, x2), 18));
		x6 = XOR(x6, ROTATE(PLUS(x5, x4), 7));
		x7 = XOR(x7, ROTATE(PLUS(x6, x5), 9));
		x4 = XOR(x4, ROTATE(PLUS(x7, x6), 13));
		x5 = XOR(x5, ROTATE(PLUS(x4, x7), 18));
		x11 = XOR(x11, ROTATE(PLUS(x10, x9), 7));
		x8 = XOR(x8, ROTATE(PLUS(x11, x10), 9));
		x9 = XOR(x9, ROTATE(PLUS(x8, x11), 13));
		x10 = XOR(x10, ROTATE(PLUS(x9, x8), 18));
		x12 = XOR(x12, ROTATE(PLUS(x15, x14), 7));
		x13 = XOR(x13, ROTATE(PLUS(x12, x15), 9));
		x14 = XOR(x14, ROTATE(PLUS(x13, x12), 13));
		x15 = XOR(x15, ROTATE(PLUS(x14, x13), 18));
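		// Feed-forward: add the original input words (j0..j15) back into the
		// working state, turning the round permutation into the keystream block.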
		x0 = PLUS(x0, j0);
		x1 = PLUS(x1, j1);
		x2 = PLUS(x2, j2);
		x3 = PLUS(x3, j3);
		x4 = PLUS(x4, j4);
		x5 = PLUS(x5, j5);
		x6 = PLUS(x6, j6);
		x7 = PLUS(x7, j7);
		x8 = PLUS(x8, j8);
		x9 = PLUS(x9, j9);
		x10 = PLUS(x10, j10);
		x11 = PLUS(x11, j11);
		x12 = PLUS(x12, j12);
		x13 = PLUS(x13, j13);
		x14 = PLUS(x14, j14);
		x15 = PLUS(x15, j15);
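		// Emit the 64-byte block: XOR each keystream word with the corresponding
		// input word from m and store the result little-endian into c.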
		U32TO8_LITTLE(c + 0, XOR(x0, U8TO32_LITTLE(m + 0)));
		U32TO8_LITTLE(c + 4, XOR(x1, U8TO32_LITTLE(m + 4)));
		U32TO8_LITTLE(c + 8, XOR(x2, U8TO32_LITTLE(m + 8)));
		U32TO8_LITTLE(c + 12, XOR(x3, U8TO32_LITTLE(m + 12)));
		U32TO8_LITTLE(c + 16, XOR(x4, U8TO32_LITTLE(m + 16)));
		U32TO8_LITTLE(c + 20, XOR(x5, U8TO32_LITTLE(m + 20)));
		U32TO8_LITTLE(c + 24, XOR(x6, U8TO32_LITTLE(m + 24)));
		U32TO8_LITTLE(c + 28, XOR(x7, U8TO32_LITTLE(m + 28)));
		U32TO8_LITTLE(c + 32, XOR(x8, U8TO32_LITTLE(m + 32)));
		U32TO8_LITTLE(c + 36, XOR(x9, U8TO32_LITTLE(m + 36)));
		U32TO8_LITTLE(c + 40, XOR(x10, U8TO32_LITTLE(m + 40)));
		U32TO8_LITTLE(c + 44, XOR(x11, U8TO32_LITTLE(m + 44)));
		U32TO8_LITTLE(c + 48, XOR(x12, U8TO32_LITTLE(m + 48)));
		U32TO8_LITTLE(c + 52, XOR(x13, U8TO32_LITTLE(m + 52)));
		U32TO8_LITTLE(c + 56, XOR(x14, U8TO32_LITTLE(m + 56)));
		U32TO8_LITTLE(c + 60, XOR(x15, U8TO32_LITTLE(m + 60)));
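		// Advance the 64-bit block counter held in j8 (low word) and j9 (high word).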
		if (! (++j8)) {
			++j9;
			/* stopping at 2^70 bytes per nonce is user's responsibility */
		}
#endif
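		// Last (possibly partial) block: copy out only the bytes that were
		// actually requested into the caller's output buffer (ctarget), then return.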
		if (bytes <= 64) {
			if (bytes < 64) {
				for (i = 0; i < bytes; ++i) {
					ctarget[i] = c[i];
				}
			}
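			// Non-SSE path: write the advanced block counter back into the
			// persistent cipher state before returning.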
#ifndef ZT_SALSA20_SSE
			_state.i[8] = j8;
			_state.i[9] = j9;
#endif
			return;
		}
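		// A full 64-byte block was processed; advance the input and output
		// pointers and loop for the next block.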
		bytes -= 64;
		c += 64;
		m += 64;
	}
}

} // namespace ZeroTier