Salsa20.cpp

/*
 * Based on public domain code available at: http://cr.yp.to/snuffle.html
 *
 * Modifications and C-native SSE macro based SSE implementation by
 * Adam Ierymenko <[email protected]>.
 *
 * Since the original was public domain, this is too.
 */
#include "Constants.hpp"
#include "Salsa20.hpp"
#define ROTATE(v,c) (((v) << (c)) | ((v) >> (32 - (c))))
#define XOR(v,w) ((v) ^ (w))
#define PLUS(v,w) ((uint32_t)((v) + (w)))
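/*
 * For reference, the three macros above compose into the Salsa20 quarter-round
 * used (unrolled) in the round code further down, e.g. for the column
 * (x0,x4,x8,x12):
 *
 *   x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
 *   x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
 *   x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
 *   x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
 */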
// Set up load/store macros with appropriate endianness (we don't use these in SSE mode)
#ifndef ZT_SALSA20_SSE
#if __BYTE_ORDER == __LITTLE_ENDIAN
#ifdef ZT_NO_TYPE_PUNNING
// Slower version that does not use type punning
#define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
#else
// Fast version that just does 32-bit load/store
#define U8TO32_LITTLE(p) (*((const uint32_t *)((const void *)(p))))
#define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = (v)
#endif // ZT_NO_TYPE_PUNNING
#else // __BYTE_ORDER == __BIG_ENDIAN (we don't support anything else... does MIDDLE_ENDIAN even still exist?)
#ifdef __GNUC__
// Use GNUC builtin bswap macros on big-endian machines if available
#define U8TO32_LITTLE(p) __builtin_bswap32(*((const uint32_t *)((const void *)(p))))
#define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = __builtin_bswap32((v))
#else // no __GNUC__
// Otherwise do it the slow, manual way on BE machines
#define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
#endif // __GNUC__ or not
#endif // __BYTE_ORDER little or big?
#endif // !ZT_SALSA20_SSE
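/*
 * Sketch of a possible alternative (not used in this file): std::memcpy-based
 * accessors also avoid type punning while letting modern compilers emit a
 * single 32-bit load/store; a byte swap would still be needed on big-endian
 * hosts, and <cstring> would have to be included. Names here are illustrative.
 *
 *   static inline uint32_t u8to32_little(const uint8_t *p) { uint32_t v; std::memcpy(&v,p,4); return v; }
 *   static inline void u32to8_little(uint8_t *c,const uint32_t v) { std::memcpy(c,&v,4); }
 */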
// Statically compute and define SSE constants
#ifdef ZT_SALSA20_SSE
class _s20sseconsts
{
public:
    _s20sseconsts()
    {
        maskLo32 = _mm_shuffle_epi32(_mm_cvtsi32_si128(-1), _MM_SHUFFLE(1, 0, 1, 0));
        maskHi32 = _mm_slli_epi64(maskLo32, 32);
    }
    __m128i maskLo32,maskHi32;
};
static const _s20sseconsts _S20SSECONSTANTS;
#endif
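// maskLo32 is 0x00000000FFFFFFFF in each 64-bit lane and maskHi32 is
// 0xFFFFFFFF00000000; they are used below to interleave 32-bit words from
// adjacent state vectors when assembling the keystream output.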
namespace ZeroTier {
void Salsa20::init(const void *key,const void *iv)
{
#ifdef ZT_SALSA20_SSE
    // State is stored in the reordered layout expected by the SSE round code below
    const uint32_t *const k = (const uint32_t *)key;
    _state.i[0] = 0x61707865;
    _state.i[1] = 0x3320646e;
    _state.i[2] = 0x79622d32;
    _state.i[3] = 0x6b206574;
    _state.i[4] = k[3];
    _state.i[5] = 0;
    _state.i[6] = k[7];
    _state.i[7] = k[2];
    _state.i[8] = 0;
    _state.i[9] = k[6];
    _state.i[10] = k[1];
    _state.i[11] = ((const uint32_t *)iv)[1];
    _state.i[12] = k[5];
    _state.i[13] = k[0];
    _state.i[14] = ((const uint32_t *)iv)[0];
    _state.i[15] = k[4];
#else
    const char *const constants = "expand 32-byte k";
    const uint8_t *const k = (const uint8_t *)key;
    _state.i[0] = U8TO32_LITTLE(constants + 0);
    _state.i[1] = U8TO32_LITTLE(k + 0);
    _state.i[2] = U8TO32_LITTLE(k + 4);
    _state.i[3] = U8TO32_LITTLE(k + 8);
    _state.i[4] = U8TO32_LITTLE(k + 12);
    _state.i[5] = U8TO32_LITTLE(constants + 4);
    _state.i[6] = U8TO32_LITTLE(((const uint8_t *)iv) + 0);
    _state.i[7] = U8TO32_LITTLE(((const uint8_t *)iv) + 4);
    _state.i[8] = 0;
    _state.i[9] = 0;
    _state.i[10] = U8TO32_LITTLE(constants + 8);
    _state.i[11] = U8TO32_LITTLE(k + 16);
    _state.i[12] = U8TO32_LITTLE(k + 20);
    _state.i[13] = U8TO32_LITTLE(k + 24);
    _state.i[14] = U8TO32_LITTLE(k + 28);
    _state.i[15] = U8TO32_LITTLE(constants + 12);
#endif
}
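/*
 * Usage sketch (illustrative only; assumes the Salsa20 class declared in
 * Salsa20.hpp is default-constructible):
 *
 *   ZeroTier::Salsa20 s;
 *   s.init(key,iv);          // key: 32 bytes, iv: 8 bytes
 *   s.crypt12(in,out,len);   // 12-round variant; crypt20() below is the 20-round variant
 */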
void Salsa20::crypt12(const void *in,void *out,unsigned int bytes)
{
    uint8_t tmp[64];
    const uint8_t *m = (const uint8_t *)in;
    uint8_t *c = (uint8_t *)out;
    uint8_t *ctarget = c;
    unsigned int i;
#ifndef ZT_SALSA20_SSE
    uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
    uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
#endif
    if (!bytes) {
        return;
    }
#ifndef ZT_SALSA20_SSE
    j0 = _state.i[0];
    j1 = _state.i[1];
    j2 = _state.i[2];
    j3 = _state.i[3];
    j4 = _state.i[4];
    j5 = _state.i[5];
    j6 = _state.i[6];
    j7 = _state.i[7];
    j8 = _state.i[8];
    j9 = _state.i[9];
    j10 = _state.i[10];
    j11 = _state.i[11];
    j12 = _state.i[12];
    j13 = _state.i[13];
    j14 = _state.i[14];
    j15 = _state.i[15];
#endif
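    // In the scalar path, x0..x15 are the working copy and j0..j15 keep the
    // input block for the final feed-forward addition; the SSE path keeps the
    // originals in X0s..X3s instead.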
    for (;;) {
        if (bytes < 64) {
            for (i = 0;i < bytes;++i) {
                tmp[i] = m[i];
            }
            m = tmp;
            ctarget = c;
            c = tmp;
        }
#ifdef ZT_SALSA20_SSE
        __m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
        __m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
        __m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
        __m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
        __m128i T;
        __m128i X0s = X0;
        __m128i X1s = X1;
        __m128i X2s = X2;
        __m128i X3s = X3;
        // 2X round x6 (12 rounds) -----------------------------------------------
        for (unsigned int r = 0;r < 6;++r) {
            T = _mm_add_epi32(X0, X3);
            X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
            T = _mm_add_epi32(X1, X0);
            X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
            T = _mm_add_epi32(X2, X1);
            X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
            T = _mm_add_epi32(X3, X2);
            X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
            X1 = _mm_shuffle_epi32(X1, 0x93);
            X2 = _mm_shuffle_epi32(X2, 0x4E);
            X3 = _mm_shuffle_epi32(X3, 0x39);
            T = _mm_add_epi32(X0, X1);
            X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
            T = _mm_add_epi32(X3, X0);
            X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
            T = _mm_add_epi32(X2, X3);
            X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
            T = _mm_add_epi32(X1, X2);
            X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
            X1 = _mm_shuffle_epi32(X1, 0x39);
            X2 = _mm_shuffle_epi32(X2, 0x4E);
            X3 = _mm_shuffle_epi32(X3, 0x93);
        }
        X0 = _mm_add_epi32(X0s,X0);
        X1 = _mm_add_epi32(X1s,X1);
        X2 = _mm_add_epi32(X2s,X2);
        X3 = _mm_add_epi32(X3s,X3);
        __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
        __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
        _mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
        if (!(++_state.i[8])) {
            ++_state.i[5]; // state reordered for SSE
            /* stopping at 2^70 bytes per nonce is user's responsibility */
        }
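        // Note: in the SSE-reordered layout the 64-bit block counter is
        // _state.i[8] (low word) with carry into _state.i[5] (high word);
        // the scalar path below carries j8 into j9 (canonical words 8 and 9).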
#else
        x0 = j0;
        x1 = j1;
        x2 = j2;
        x3 = j3;
        x4 = j4;
        x5 = j5;
        x6 = j6;
        x7 = j7;
        x8 = j8;
        x9 = j9;
        x10 = j10;
        x11 = j11;
        x12 = j12;
        x13 = j13;
        x14 = j14;
        x15 = j15;
        // 2X round x6 (12 rounds: column round then row round) -------------------
        for (unsigned int r = 0;r < 6;++r) {
            x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
            x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
            x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
            x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
            x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
            x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
            x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
            x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
            x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
            x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
            x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
            x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
            x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
            x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
            x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
            x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
            x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
            x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
            x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
            x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
            x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
            x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
            x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
            x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
            x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
            x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
            x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
            x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
            x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
            x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
            x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
            x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
        }
        x0 = PLUS(x0,j0);
        x1 = PLUS(x1,j1);
        x2 = PLUS(x2,j2);
        x3 = PLUS(x3,j3);
        x4 = PLUS(x4,j4);
        x5 = PLUS(x5,j5);
        x6 = PLUS(x6,j6);
        x7 = PLUS(x7,j7);
        x8 = PLUS(x8,j8);
        x9 = PLUS(x9,j9);
        x10 = PLUS(x10,j10);
        x11 = PLUS(x11,j11);
        x12 = PLUS(x12,j12);
        x13 = PLUS(x13,j13);
        x14 = PLUS(x14,j14);
        x15 = PLUS(x15,j15);
        U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
        U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
        U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
        U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
        U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
        U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
        U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
        U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
        U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
        U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
        U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
        U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
        U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
        U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
        U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
        U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
        if (!(++j8)) {
            ++j9;
            /* stopping at 2^70 bytes per nonce is user's responsibility */
        }
#endif
        if (bytes <= 64) {
            if (bytes < 64) {
                for (i = 0;i < bytes;++i) {
                    ctarget[i] = c[i];
                }
            }
#ifndef ZT_SALSA20_SSE
            _state.i[8] = j8;
            _state.i[9] = j9;
#endif
            return;
        }
        bytes -= 64;
        c += 64;
        m += 64;
    }
}
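/*
 * crypt20() below is structured identically to crypt12() above; the only
 * difference is that it applies 20 Salsa20 rounds (10 double rounds) per
 * 64-byte block instead of 12.
 */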
void Salsa20::crypt20(const void *in,void *out,unsigned int bytes)
{
    uint8_t tmp[64];
    const uint8_t *m = (const uint8_t *)in;
    uint8_t *c = (uint8_t *)out;
    uint8_t *ctarget = c;
    unsigned int i;
#ifndef ZT_SALSA20_SSE
    uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
    uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
#endif
    if (!bytes) {
        return;
    }
#ifndef ZT_SALSA20_SSE
    j0 = _state.i[0];
    j1 = _state.i[1];
    j2 = _state.i[2];
    j3 = _state.i[3];
    j4 = _state.i[4];
    j5 = _state.i[5];
    j6 = _state.i[6];
    j7 = _state.i[7];
    j8 = _state.i[8];
    j9 = _state.i[9];
    j10 = _state.i[10];
    j11 = _state.i[11];
    j12 = _state.i[12];
    j13 = _state.i[13];
    j14 = _state.i[14];
    j15 = _state.i[15];
#endif
    for (;;) {
        if (bytes < 64) {
            for (i = 0;i < bytes;++i) {
                tmp[i] = m[i];
            }
            m = tmp;
            ctarget = c;
            c = tmp;
        }
#ifdef ZT_SALSA20_SSE
        __m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
        __m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
        __m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
        __m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
        __m128i T;
        __m128i X0s = X0;
        __m128i X1s = X1;
        __m128i X2s = X2;
        __m128i X3s = X3;
        // 2X round x10 (20 rounds) ----------------------------------------------
        for (unsigned int r = 0;r < 10;++r) {
            T = _mm_add_epi32(X0, X3);
            X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
            T = _mm_add_epi32(X1, X0);
            X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
            T = _mm_add_epi32(X2, X1);
            X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
            T = _mm_add_epi32(X3, X2);
            X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
            X1 = _mm_shuffle_epi32(X1, 0x93);
            X2 = _mm_shuffle_epi32(X2, 0x4E);
            X3 = _mm_shuffle_epi32(X3, 0x39);
            T = _mm_add_epi32(X0, X1);
            X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
            T = _mm_add_epi32(X3, X0);
            X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
            T = _mm_add_epi32(X2, X3);
            X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
            T = _mm_add_epi32(X1, X2);
            X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
            X1 = _mm_shuffle_epi32(X1, 0x39);
            X2 = _mm_shuffle_epi32(X2, 0x4E);
            X3 = _mm_shuffle_epi32(X3, 0x93);
        }
        X0 = _mm_add_epi32(X0s,X0);
        X1 = _mm_add_epi32(X1s,X1);
        X2 = _mm_add_epi32(X2s,X2);
        X3 = _mm_add_epi32(X3s,X3);
        __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
        __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
        __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
        _mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
        _mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
        if (!(++_state.i[8])) {
            ++_state.i[5]; // state reordered for SSE
            /* stopping at 2^70 bytes per nonce is user's responsibility */
        }
#else
        x0 = j0;
        x1 = j1;
        x2 = j2;
        x3 = j3;
        x4 = j4;
        x5 = j5;
        x6 = j6;
        x7 = j7;
        x8 = j8;
        x9 = j9;
        x10 = j10;
        x11 = j11;
        x12 = j12;
        x13 = j13;
        x14 = j14;
        x15 = j15;
        // 2X round x7 (double rounds 1-7 of 10) ----------------------------------
        for (unsigned int r = 0;r < 7;++r) {
            x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
            x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
            x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
            x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
            x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
            x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
            x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
            x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
            x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
            x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
            x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
            x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
            x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
            x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
            x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
            x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
            x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
            x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
            x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
            x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
            x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
            x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
            x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
            x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
            x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
            x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
            x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
            x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
            x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
            x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
            x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
            x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
        }
  1113. // 2X round -------------------------------------------------------------
  1114. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  1115. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  1116. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  1117. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  1118. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  1119. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  1120. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  1121. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  1122. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  1123. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  1124. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  1125. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  1126. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  1127. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  1128. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  1129. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  1130. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  1131. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  1132. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  1133. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  1134. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  1135. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  1136. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  1137. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  1138. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  1139. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  1140. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  1141. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  1142. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  1143. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  1144. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  1145. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  1146. // 2X round -------------------------------------------------------------
  1147. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  1148. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  1149. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  1150. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  1151. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  1152. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  1153. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  1154. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  1155. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  1156. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  1157. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  1158. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  1159. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  1160. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  1161. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  1162. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  1163. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  1164. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  1165. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  1166. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  1167. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  1168. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  1169. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  1170. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  1171. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  1172. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  1173. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  1174. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  1175. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  1176. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  1177. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  1178. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  1179. // 2X round -------------------------------------------------------------
  1180. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  1181. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  1182. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  1183. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  1184. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  1185. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  1186. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  1187. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  1188. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  1189. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  1190. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  1191. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  1192. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  1193. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  1194. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  1195. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  1196. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  1197. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  1198. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  1199. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  1200. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  1201. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  1202. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  1203. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  1204. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  1205. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  1206. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  1207. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  1208. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  1209. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  1210. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  1211. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
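		// Feedforward: add the original input words (j0..j15) back into the round
		// output so the core function cannot be run backwards to recover the state.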
		x0 = PLUS(x0,j0);
		x1 = PLUS(x1,j1);
		x2 = PLUS(x2,j2);
		x3 = PLUS(x3,j3);
		x4 = PLUS(x4,j4);
		x5 = PLUS(x5,j5);
		x6 = PLUS(x6,j6);
		x7 = PLUS(x7,j7);
		x8 = PLUS(x8,j8);
		x9 = PLUS(x9,j9);
		x10 = PLUS(x10,j10);
		x11 = PLUS(x11,j11);
		x12 = PLUS(x12,j12);
		x13 = PLUS(x13,j13);
		x14 = PLUS(x14,j14);
		x15 = PLUS(x15,j15);
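		// Serialize the 16 keystream words little-endian and XOR them with the next
		// 64 input bytes (m) to produce 64 output bytes (c).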
		U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
		U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
		U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
		U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
		U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
		U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
		U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
		U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
		U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
		U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
		U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
		U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
		U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
		U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
		U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
		U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
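		// Advance the 64-bit block counter kept in j8 (low word) and j9 (high word),
		// carrying into j9 when j8 wraps.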
		if (!(++j8)) {
			++j9;
			/* stopping at 2^70 bytes per nonce is user's responsibility */
		}
#endif
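		// Last (possibly partial) block: copy only the requested number of bytes to
		// the final destination (ctarget), persist the counter in the non-SSE path,
		// and return.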
		if (bytes <= 64) {
			if (bytes < 64) {
				for (i = 0;i < bytes;++i) {
					ctarget[i] = c[i];
				}
			}
#ifndef ZT_SALSA20_SSE
			_state.i[8] = j8;
			_state.i[9] = j9;
#endif
			return;
		}
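		// More than 64 bytes remain: advance the input/output pointers and loop for
		// the next block.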
		bytes -= 64;
		c += 64;
		m += 64;
	}
}

} // namespace ZeroTier