Salsa20.cpp

  1. /*
  2. * Based on public domain code available at: http://cr.yp.to/snuffle.html
  3. *
  4. * Modifications and C-native SSE macro based SSE implementation by
  5. * Adam Ierymenko <[email protected]>.
  6. *
  7. * Since the original was public domain, this is too.
  8. */
  9. #include "Constants.hpp"
  10. #include "Salsa20.hpp"
  11. #define ROTATE(v,c) (((v) << (c)) | ((v) >> (32 - (c))))
  12. #define XOR(v,w) ((v) ^ (w))
  13. #define PLUS(v,w) ((uint32_t)((v) + (w)))
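/*
 * Illustrative note (added; not in the original source): ROTATE, XOR and PLUS
 * are the three primitives of the Salsa20 quarter-round. One quarter-round on
 * four state words a, b, c, d expands to the pattern that the unrolled loops
 * below repeat across the columns and then the rows of the 4x4 state:
 *
 *   b = XOR(b, ROTATE(PLUS(a, d), 7));
 *   c = XOR(c, ROTATE(PLUS(b, a), 9));
 *   d = XOR(d, ROTATE(PLUS(c, b), 13));
 *   a = XOR(a, ROTATE(PLUS(d, c), 18));
 */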
  14. // Set up load/store macros with appropriate endianness (we don't use these in SSE mode)
  15. #ifndef ZT_SALSA20_SSE
  16. #if __BYTE_ORDER == __LITTLE_ENDIAN
  17. #ifdef ZT_NO_TYPE_PUNNING
  18. // Slower version that does not use type punning
  19. #define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
  20. static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
  21. #else
  22. // Fast version that just does 32-bit load/store
  23. #define U8TO32_LITTLE(p) (*((const uint32_t *)((const void *)(p))))
  24. #define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = (v)
  25. #endif // ZT_NO_TYPE_PUNNING
  26. #else // __BYTE_ORDER == __BIG_ENDIAN (we don't support anything else... does MIDDLE_ENDIAN even still exist?)
  27. #ifdef __GNUC__
  28. // Use GNUC builtin bswap macros on big-endian machines if available
  29. #define U8TO32_LITTLE(p) __builtin_bswap32(*((const uint32_t *)((const void *)(p))))
  30. #define U32TO8_LITTLE(c,v) *((uint32_t *)((void *)(c))) = __builtin_bswap32((v))
  31. #else // no __GNUC__
  32. // Otherwise do it the slow, manual way on BE machines
  33. #define U8TO32_LITTLE(p) ( ((uint32_t)(p)[0]) | ((uint32_t)(p)[1] << 8) | ((uint32_t)(p)[2] << 16) | ((uint32_t)(p)[3] << 24) )
  34. static inline void U32TO8_LITTLE(uint8_t *const c,const uint32_t v) { c[0] = (uint8_t)v; c[1] = (uint8_t)(v >> 8); c[2] = (uint8_t)(v >> 16); c[3] = (uint8_t)(v >> 24); }
  35. #endif // __GNUC__ or not
  36. #endif // __BYTE_ORDER little or big?
  37. #endif // !ZT_SALSA20_SSE
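/*
 * Sketch (added for clarity, not part of the original): whichever branch above
 * was selected, U8TO32_LITTLE()/U32TO8_LITTLE() give a little-endian view of a
 * 4-byte buffer, e.g.:
 *
 *   uint8_t b[4] = { 0x01, 0x02, 0x03, 0x04 };
 *   uint32_t w = U8TO32_LITTLE(b);  // w == 0x04030201 regardless of host byte order
 *   U32TO8_LITTLE(b, w);            // writes back 0x01 0x02 0x03 0x04
 */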
  38. // Statically compute and define SSE constants
  39. #ifdef ZT_SALSA20_SSE
  40. class _s20sseconsts
  41. {
  42. public:
  43. _s20sseconsts()
  44. {
  45. maskLo32 = _mm_shuffle_epi32(_mm_cvtsi32_si128(-1), _MM_SHUFFLE(1, 0, 1, 0));
  46. maskHi32 = _mm_slli_epi64(maskLo32, 32);
  47. }
  48. __m128i maskLo32,maskHi32;
  49. };
  50. static const _s20sseconsts _S20SSECONSTANTS;
  51. #endif
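// Note (added): maskLo32 selects the low 32-bit half of each 64-bit lane
// (lanes 0 and 2) and maskHi32 the high half (lanes 1 and 3). They are used in
// encrypt12()/encrypt20() below to re-interleave the diagonally stored SSE
// state back into canonical word order before XORing with the input.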
  52. namespace ZeroTier {
  53. void Salsa20::init(const void *key,unsigned int kbits,const void *iv)
  54. throw()
  55. {
  56. #ifdef ZT_SALSA20_SSE
  57. const uint32_t *k = (const uint32_t *)key;
  58. _state.i[0] = 0x61707865;
  59. _state.i[3] = 0x6b206574;
  60. _state.i[13] = k[0];
  61. _state.i[10] = k[1];
  62. _state.i[7] = k[2];
  63. _state.i[4] = k[3];
  64. if (kbits == 256) {
  65. k += 4;
  66. _state.i[1] = 0x3320646e;
  67. _state.i[2] = 0x79622d32;
  68. } else {
  69. _state.i[1] = 0x3120646e;
  70. _state.i[2] = 0x79622d36;
  71. }
  72. _state.i[15] = k[0];
  73. _state.i[12] = k[1];
  74. _state.i[9] = k[2];
  75. _state.i[6] = k[3];
  76. _state.i[14] = ((const uint32_t *)iv)[0];
  77. _state.i[11] = ((const uint32_t *)iv)[1];
  78. _state.i[5] = 0;
  79. _state.i[8] = 0;
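// Note (added): the SSE path stores the canonical 4x4 Salsa20 matrix by wrapped
// diagonals rather than by rows, so each 128-bit vector of _state.v[] holds one
// diagonal:
//   v[0] = words {0, 5, 10, 15}  (the four constants)
//   v[1] = words {4, 9, 14, 3}
//   v[2] = words {8, 13, 2, 7}
//   v[3] = words {12, 1, 6, 11}
// which is why the key, IV and counter words land at the indices used above.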
  80. #else
  81. const char *constants;
  82. const uint8_t *k = (const uint8_t *)key;
  83. _state.i[1] = U8TO32_LITTLE(k + 0);
  84. _state.i[2] = U8TO32_LITTLE(k + 4);
  85. _state.i[3] = U8TO32_LITTLE(k + 8);
  86. _state.i[4] = U8TO32_LITTLE(k + 12);
  87. if (kbits == 256) { /* recommended */
  88. k += 16;
  89. constants = "expand 32-byte k";
  90. } else { /* kbits == 128 */
  91. constants = "expand 16-byte k";
  92. }
  93. _state.i[5] = U8TO32_LITTLE(constants + 4);
  94. _state.i[6] = U8TO32_LITTLE(((const uint8_t *)iv) + 0);
  95. _state.i[7] = U8TO32_LITTLE(((const uint8_t *)iv) + 4);
  96. _state.i[8] = 0;
  97. _state.i[9] = 0;
  98. _state.i[10] = U8TO32_LITTLE(constants + 8);
  99. _state.i[11] = U8TO32_LITTLE(k + 0);
  100. _state.i[12] = U8TO32_LITTLE(k + 4);
  101. _state.i[13] = U8TO32_LITTLE(k + 8);
  102. _state.i[14] = U8TO32_LITTLE(k + 12);
  103. _state.i[15] = U8TO32_LITTLE(constants + 12);
  104. _state.i[0] = U8TO32_LITTLE(constants + 0);
  105. #endif
  106. }
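/*
 * Usage sketch (illustrative only; the key/IV buffers are placeholders and a
 * default constructor is assumed to be declared in Salsa20.hpp):
 *
 *   uint8_t key[32];        // 256-bit key
 *   uint8_t iv[8];          // 64-bit nonce
 *   ZeroTier::Salsa20 s20;
 *   s20.init(key, 256, iv);
 *   s20.encrypt20(plain, cipher, len); // XOR data with the 20-round keystream
 *   // Decryption is the same call with the same key, IV and counter position.
 */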
  107. void Salsa20::encrypt12(const void *in,void *out,unsigned int bytes)
  108. throw()
  109. {
  110. uint8_t tmp[64];
  111. const uint8_t *m = (const uint8_t *)in;
  112. uint8_t *c = (uint8_t *)out;
  113. uint8_t *ctarget = c;
  114. unsigned int i;
  115. #ifndef ZT_SALSA20_SSE
  116. uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
  117. uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
  118. #endif
  119. if (!bytes)
  120. return;
  121. #ifndef ZT_SALSA20_SSE
  122. j0 = _state.i[0];
  123. j1 = _state.i[1];
  124. j2 = _state.i[2];
  125. j3 = _state.i[3];
  126. j4 = _state.i[4];
  127. j5 = _state.i[5];
  128. j6 = _state.i[6];
  129. j7 = _state.i[7];
  130. j8 = _state.i[8];
  131. j9 = _state.i[9];
  132. j10 = _state.i[10];
  133. j11 = _state.i[11];
  134. j12 = _state.i[12];
  135. j13 = _state.i[13];
  136. j14 = _state.i[14];
  137. j15 = _state.i[15];
  138. #endif
  139. for (;;) {
  140. if (bytes < 64) {
  141. for (i = 0;i < bytes;++i)
  142. tmp[i] = m[i];
  143. m = tmp;
  144. ctarget = c;
  145. c = tmp;
  146. }
  147. #ifdef ZT_SALSA20_SSE
  148. __m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
  149. __m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
  150. __m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
  151. __m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
  152. __m128i T;
  153. __m128i X0s = X0;
  154. __m128i X1s = X1;
  155. __m128i X2s = X2;
  156. __m128i X3s = X3;
  157. // 2X round -------------------------------------------------------------
  158. T = _mm_add_epi32(X0, X3);
  159. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  160. T = _mm_add_epi32(X1, X0);
  161. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  162. T = _mm_add_epi32(X2, X1);
  163. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  164. T = _mm_add_epi32(X3, X2);
  165. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  166. X1 = _mm_shuffle_epi32(X1, 0x93);
  167. X2 = _mm_shuffle_epi32(X2, 0x4E);
  168. X3 = _mm_shuffle_epi32(X3, 0x39);
  169. T = _mm_add_epi32(X0, X1);
  170. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  171. T = _mm_add_epi32(X3, X0);
  172. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  173. T = _mm_add_epi32(X2, X3);
  174. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  175. T = _mm_add_epi32(X1, X2);
  176. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  177. X1 = _mm_shuffle_epi32(X1, 0x39);
  178. X2 = _mm_shuffle_epi32(X2, 0x4E);
  179. X3 = _mm_shuffle_epi32(X3, 0x93);
  180. // 2X round -------------------------------------------------------------
  181. T = _mm_add_epi32(X0, X3);
  182. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  183. T = _mm_add_epi32(X1, X0);
  184. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  185. T = _mm_add_epi32(X2, X1);
  186. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  187. T = _mm_add_epi32(X3, X2);
  188. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  189. X1 = _mm_shuffle_epi32(X1, 0x93);
  190. X2 = _mm_shuffle_epi32(X2, 0x4E);
  191. X3 = _mm_shuffle_epi32(X3, 0x39);
  192. T = _mm_add_epi32(X0, X1);
  193. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  194. T = _mm_add_epi32(X3, X0);
  195. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  196. T = _mm_add_epi32(X2, X3);
  197. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  198. T = _mm_add_epi32(X1, X2);
  199. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  200. X1 = _mm_shuffle_epi32(X1, 0x39);
  201. X2 = _mm_shuffle_epi32(X2, 0x4E);
  202. X3 = _mm_shuffle_epi32(X3, 0x93);
  203. // 2X round -------------------------------------------------------------
  204. T = _mm_add_epi32(X0, X3);
  205. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  206. T = _mm_add_epi32(X1, X0);
  207. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  208. T = _mm_add_epi32(X2, X1);
  209. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  210. T = _mm_add_epi32(X3, X2);
  211. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  212. X1 = _mm_shuffle_epi32(X1, 0x93);
  213. X2 = _mm_shuffle_epi32(X2, 0x4E);
  214. X3 = _mm_shuffle_epi32(X3, 0x39);
  215. T = _mm_add_epi32(X0, X1);
  216. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  217. T = _mm_add_epi32(X3, X0);
  218. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  219. T = _mm_add_epi32(X2, X3);
  220. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  221. T = _mm_add_epi32(X1, X2);
  222. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  223. X1 = _mm_shuffle_epi32(X1, 0x39);
  224. X2 = _mm_shuffle_epi32(X2, 0x4E);
  225. X3 = _mm_shuffle_epi32(X3, 0x93);
  226. // 2X round -------------------------------------------------------------
  227. T = _mm_add_epi32(X0, X3);
  228. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  229. T = _mm_add_epi32(X1, X0);
  230. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  231. T = _mm_add_epi32(X2, X1);
  232. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  233. T = _mm_add_epi32(X3, X2);
  234. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  235. X1 = _mm_shuffle_epi32(X1, 0x93);
  236. X2 = _mm_shuffle_epi32(X2, 0x4E);
  237. X3 = _mm_shuffle_epi32(X3, 0x39);
  238. T = _mm_add_epi32(X0, X1);
  239. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  240. T = _mm_add_epi32(X3, X0);
  241. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  242. T = _mm_add_epi32(X2, X3);
  243. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  244. T = _mm_add_epi32(X1, X2);
  245. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  246. X1 = _mm_shuffle_epi32(X1, 0x39);
  247. X2 = _mm_shuffle_epi32(X2, 0x4E);
  248. X3 = _mm_shuffle_epi32(X3, 0x93);
  249. // 2X round -------------------------------------------------------------
  250. T = _mm_add_epi32(X0, X3);
  251. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  252. T = _mm_add_epi32(X1, X0);
  253. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  254. T = _mm_add_epi32(X2, X1);
  255. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  256. T = _mm_add_epi32(X3, X2);
  257. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  258. X1 = _mm_shuffle_epi32(X1, 0x93);
  259. X2 = _mm_shuffle_epi32(X2, 0x4E);
  260. X3 = _mm_shuffle_epi32(X3, 0x39);
  261. T = _mm_add_epi32(X0, X1);
  262. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  263. T = _mm_add_epi32(X3, X0);
  264. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  265. T = _mm_add_epi32(X2, X3);
  266. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  267. T = _mm_add_epi32(X1, X2);
  268. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  269. X1 = _mm_shuffle_epi32(X1, 0x39);
  270. X2 = _mm_shuffle_epi32(X2, 0x4E);
  271. X3 = _mm_shuffle_epi32(X3, 0x93);
  272. // 2X round -------------------------------------------------------------
  273. T = _mm_add_epi32(X0, X3);
  274. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  275. T = _mm_add_epi32(X1, X0);
  276. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  277. T = _mm_add_epi32(X2, X1);
  278. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  279. T = _mm_add_epi32(X3, X2);
  280. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  281. X1 = _mm_shuffle_epi32(X1, 0x93);
  282. X2 = _mm_shuffle_epi32(X2, 0x4E);
  283. X3 = _mm_shuffle_epi32(X3, 0x39);
  284. T = _mm_add_epi32(X0, X1);
  285. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  286. T = _mm_add_epi32(X3, X0);
  287. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  288. T = _mm_add_epi32(X2, X3);
  289. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  290. T = _mm_add_epi32(X1, X2);
  291. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  292. X1 = _mm_shuffle_epi32(X1, 0x39);
  293. X2 = _mm_shuffle_epi32(X2, 0x4E);
  294. X3 = _mm_shuffle_epi32(X3, 0x93);
  295. X0 = _mm_add_epi32(X0s,X0);
  296. X1 = _mm_add_epi32(X1s,X1);
  297. X2 = _mm_add_epi32(X2s,X2);
  298. X3 = _mm_add_epi32(X3s,X3);
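// Note (added): the four stores below re-interleave the diagonal vectors back
// into canonical order, writing keystream words 0-3, 4-7, 8-11 and 12-15 XORed
// with the corresponding 16-byte pieces of the input block.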
  299. __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  300. __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  301. __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
  302. __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
  303. _mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
  304. _mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
  305. _mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
  306. _mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
  307. if (!(++_state.i[8])) {
  308. ++_state.i[5]; // state reordered for SSE
  309. /* stopping at 2^70 bytes per nonce is user's responsibility */
  310. }
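// (In the SSE layout i[8] is the low and i[5] the high half of the 64-bit
// block counter, i.e. canonical words 8 and 9; see the layout note in init().)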
  311. #else
  312. x0 = j0;
  313. x1 = j1;
  314. x2 = j2;
  315. x3 = j3;
  316. x4 = j4;
  317. x5 = j5;
  318. x6 = j6;
  319. x7 = j7;
  320. x8 = j8;
  321. x9 = j9;
  322. x10 = j10;
  323. x11 = j11;
  324. x12 = j12;
  325. x13 = j13;
  326. x14 = j14;
  327. x15 = j15;
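// Note (added): each "2X round" block below is one column round followed by one
// row round; six blocks yield the 12 rounds of Salsa20/12 (encrypt20() runs ten).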
  328. // 2X round -------------------------------------------------------------
  329. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  330. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  331. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  332. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  333. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  334. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  335. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  336. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  337. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  338. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  339. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  340. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  341. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  342. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  343. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  344. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  345. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  346. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  347. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  348. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  349. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  350. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  351. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  352. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  353. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  354. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  355. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  356. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  357. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  358. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  359. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  360. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  361. // 2X round -------------------------------------------------------------
  362. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  363. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  364. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  365. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  366. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  367. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  368. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  369. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  370. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  371. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  372. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  373. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  374. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  375. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  376. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  377. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  378. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  379. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  380. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  381. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  382. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  383. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  384. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  385. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  386. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  387. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  388. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  389. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  390. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  391. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  392. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  393. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  394. // 2X round -------------------------------------------------------------
  395. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  396. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  397. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  398. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  399. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  400. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  401. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  402. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  403. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  404. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  405. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  406. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  407. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  408. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  409. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  410. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  411. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  412. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  413. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  414. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  415. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  416. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  417. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  418. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  419. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  420. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  421. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  422. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  423. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  424. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  425. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  426. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  427. // 2X round -------------------------------------------------------------
  428. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  429. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  430. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  431. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  432. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  433. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  434. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  435. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  436. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  437. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  438. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  439. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  440. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  441. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  442. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  443. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  444. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  445. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  446. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  447. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  448. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  449. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  450. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  451. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  452. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  453. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  454. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  455. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  456. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  457. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  458. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  459. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  460. // 2X round -------------------------------------------------------------
  461. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  462. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  463. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  464. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  465. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  466. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  467. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  468. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  469. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  470. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  471. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  472. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  473. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  474. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  475. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  476. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  477. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  478. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  479. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  480. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  481. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  482. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  483. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  484. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  485. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  486. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  487. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  488. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  489. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  490. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  491. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  492. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  493. // 2X round -------------------------------------------------------------
  494. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  495. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  496. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  497. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  498. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  499. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  500. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  501. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  502. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  503. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  504. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  505. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  506. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  507. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  508. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  509. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  510. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  511. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  512. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  513. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  514. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  515. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  516. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  517. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  518. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  519. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  520. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  521. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  522. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  523. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  524. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  525. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  526. x0 = PLUS(x0,j0);
  527. x1 = PLUS(x1,j1);
  528. x2 = PLUS(x2,j2);
  529. x3 = PLUS(x3,j3);
  530. x4 = PLUS(x4,j4);
  531. x5 = PLUS(x5,j5);
  532. x6 = PLUS(x6,j6);
  533. x7 = PLUS(x7,j7);
  534. x8 = PLUS(x8,j8);
  535. x9 = PLUS(x9,j9);
  536. x10 = PLUS(x10,j10);
  537. x11 = PLUS(x11,j11);
  538. x12 = PLUS(x12,j12);
  539. x13 = PLUS(x13,j13);
  540. x14 = PLUS(x14,j14);
  541. x15 = PLUS(x15,j15);
  542. U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
  543. U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
  544. U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
  545. U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
  546. U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
  547. U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
  548. U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
  549. U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
  550. U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
  551. U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
  552. U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
  553. U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
  554. U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
  555. U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
  556. U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
  557. U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
  558. if (!(++j8)) {
  559. ++j9;
  560. /* stopping at 2^70 bytes per nonce is user's responsibility */
  561. }
  562. #endif
  563. if (bytes <= 64) {
  564. if (bytes < 64) {
  565. for (i = 0;i < bytes;++i)
  566. ctarget[i] = c[i];
  567. }
  568. #ifndef ZT_SALSA20_SSE
  569. _state.i[8] = j8;
  570. _state.i[9] = j9;
  571. #endif
  572. return;
  573. }
  574. bytes -= 64;
  575. c += 64;
  576. m += 64;
  577. }
  578. }
  579. void Salsa20::encrypt20(const void *in,void *out,unsigned int bytes)
  580. throw()
  581. {
  582. uint8_t tmp[64];
  583. const uint8_t *m = (const uint8_t *)in;
  584. uint8_t *c = (uint8_t *)out;
  585. uint8_t *ctarget = c;
  586. unsigned int i;
  587. #ifndef ZT_SALSA20_SSE
  588. uint32_t x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15;
  589. uint32_t j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15;
  590. #endif
  591. if (!bytes)
  592. return;
  593. #ifndef ZT_SALSA20_SSE
  594. j0 = _state.i[0];
  595. j1 = _state.i[1];
  596. j2 = _state.i[2];
  597. j3 = _state.i[3];
  598. j4 = _state.i[4];
  599. j5 = _state.i[5];
  600. j6 = _state.i[6];
  601. j7 = _state.i[7];
  602. j8 = _state.i[8];
  603. j9 = _state.i[9];
  604. j10 = _state.i[10];
  605. j11 = _state.i[11];
  606. j12 = _state.i[12];
  607. j13 = _state.i[13];
  608. j14 = _state.i[14];
  609. j15 = _state.i[15];
  610. #endif
  611. for (;;) {
  612. if (bytes < 64) {
  613. for (i = 0;i < bytes;++i)
  614. tmp[i] = m[i];
  615. m = tmp;
  616. ctarget = c;
  617. c = tmp;
  618. }
  619. #ifdef ZT_SALSA20_SSE
  620. __m128i X0 = _mm_loadu_si128((const __m128i *)&(_state.v[0]));
  621. __m128i X1 = _mm_loadu_si128((const __m128i *)&(_state.v[1]));
  622. __m128i X2 = _mm_loadu_si128((const __m128i *)&(_state.v[2]));
  623. __m128i X3 = _mm_loadu_si128((const __m128i *)&(_state.v[3]));
  624. __m128i T;
  625. __m128i X0s = X0;
  626. __m128i X1s = X1;
  627. __m128i X2s = X2;
  628. __m128i X3s = X3;
  629. // 2X round -------------------------------------------------------------
  630. T = _mm_add_epi32(X0, X3);
  631. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  632. T = _mm_add_epi32(X1, X0);
  633. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  634. T = _mm_add_epi32(X2, X1);
  635. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  636. T = _mm_add_epi32(X3, X2);
  637. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  638. X1 = _mm_shuffle_epi32(X1, 0x93);
  639. X2 = _mm_shuffle_epi32(X2, 0x4E);
  640. X3 = _mm_shuffle_epi32(X3, 0x39);
  641. T = _mm_add_epi32(X0, X1);
  642. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  643. T = _mm_add_epi32(X3, X0);
  644. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  645. T = _mm_add_epi32(X2, X3);
  646. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  647. T = _mm_add_epi32(X1, X2);
  648. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  649. X1 = _mm_shuffle_epi32(X1, 0x39);
  650. X2 = _mm_shuffle_epi32(X2, 0x4E);
  651. X3 = _mm_shuffle_epi32(X3, 0x93);
  652. // 2X round -------------------------------------------------------------
  653. T = _mm_add_epi32(X0, X3);
  654. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  655. T = _mm_add_epi32(X1, X0);
  656. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  657. T = _mm_add_epi32(X2, X1);
  658. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  659. T = _mm_add_epi32(X3, X2);
  660. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  661. X1 = _mm_shuffle_epi32(X1, 0x93);
  662. X2 = _mm_shuffle_epi32(X2, 0x4E);
  663. X3 = _mm_shuffle_epi32(X3, 0x39);
  664. T = _mm_add_epi32(X0, X1);
  665. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  666. T = _mm_add_epi32(X3, X0);
  667. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  668. T = _mm_add_epi32(X2, X3);
  669. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  670. T = _mm_add_epi32(X1, X2);
  671. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  672. X1 = _mm_shuffle_epi32(X1, 0x39);
  673. X2 = _mm_shuffle_epi32(X2, 0x4E);
  674. X3 = _mm_shuffle_epi32(X3, 0x93);
  675. // 2X round -------------------------------------------------------------
  676. T = _mm_add_epi32(X0, X3);
  677. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  678. T = _mm_add_epi32(X1, X0);
  679. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  680. T = _mm_add_epi32(X2, X1);
  681. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  682. T = _mm_add_epi32(X3, X2);
  683. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  684. X1 = _mm_shuffle_epi32(X1, 0x93);
  685. X2 = _mm_shuffle_epi32(X2, 0x4E);
  686. X3 = _mm_shuffle_epi32(X3, 0x39);
  687. T = _mm_add_epi32(X0, X1);
  688. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  689. T = _mm_add_epi32(X3, X0);
  690. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  691. T = _mm_add_epi32(X2, X3);
  692. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  693. T = _mm_add_epi32(X1, X2);
  694. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  695. X1 = _mm_shuffle_epi32(X1, 0x39);
  696. X2 = _mm_shuffle_epi32(X2, 0x4E);
  697. X3 = _mm_shuffle_epi32(X3, 0x93);
  698. // 2X round -------------------------------------------------------------
  699. T = _mm_add_epi32(X0, X3);
  700. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  701. T = _mm_add_epi32(X1, X0);
  702. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  703. T = _mm_add_epi32(X2, X1);
  704. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  705. T = _mm_add_epi32(X3, X2);
  706. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  707. X1 = _mm_shuffle_epi32(X1, 0x93);
  708. X2 = _mm_shuffle_epi32(X2, 0x4E);
  709. X3 = _mm_shuffle_epi32(X3, 0x39);
  710. T = _mm_add_epi32(X0, X1);
  711. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  712. T = _mm_add_epi32(X3, X0);
  713. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  714. T = _mm_add_epi32(X2, X3);
  715. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  716. T = _mm_add_epi32(X1, X2);
  717. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  718. X1 = _mm_shuffle_epi32(X1, 0x39);
  719. X2 = _mm_shuffle_epi32(X2, 0x4E);
  720. X3 = _mm_shuffle_epi32(X3, 0x93);
  721. // 2X round -------------------------------------------------------------
  722. T = _mm_add_epi32(X0, X3);
  723. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  724. T = _mm_add_epi32(X1, X0);
  725. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  726. T = _mm_add_epi32(X2, X1);
  727. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  728. T = _mm_add_epi32(X3, X2);
  729. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  730. X1 = _mm_shuffle_epi32(X1, 0x93);
  731. X2 = _mm_shuffle_epi32(X2, 0x4E);
  732. X3 = _mm_shuffle_epi32(X3, 0x39);
  733. T = _mm_add_epi32(X0, X1);
  734. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  735. T = _mm_add_epi32(X3, X0);
  736. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  737. T = _mm_add_epi32(X2, X3);
  738. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  739. T = _mm_add_epi32(X1, X2);
  740. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  741. X1 = _mm_shuffle_epi32(X1, 0x39);
  742. X2 = _mm_shuffle_epi32(X2, 0x4E);
  743. X3 = _mm_shuffle_epi32(X3, 0x93);
  744. // 2X round -------------------------------------------------------------
  745. T = _mm_add_epi32(X0, X3);
  746. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  747. T = _mm_add_epi32(X1, X0);
  748. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  749. T = _mm_add_epi32(X2, X1);
  750. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  751. T = _mm_add_epi32(X3, X2);
  752. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  753. X1 = _mm_shuffle_epi32(X1, 0x93);
  754. X2 = _mm_shuffle_epi32(X2, 0x4E);
  755. X3 = _mm_shuffle_epi32(X3, 0x39);
  756. T = _mm_add_epi32(X0, X1);
  757. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  758. T = _mm_add_epi32(X3, X0);
  759. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  760. T = _mm_add_epi32(X2, X3);
  761. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  762. T = _mm_add_epi32(X1, X2);
  763. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  764. X1 = _mm_shuffle_epi32(X1, 0x39);
  765. X2 = _mm_shuffle_epi32(X2, 0x4E);
  766. X3 = _mm_shuffle_epi32(X3, 0x93);
  767. // 2X round -------------------------------------------------------------
  768. T = _mm_add_epi32(X0, X3);
  769. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  770. T = _mm_add_epi32(X1, X0);
  771. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  772. T = _mm_add_epi32(X2, X1);
  773. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  774. T = _mm_add_epi32(X3, X2);
  775. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  776. X1 = _mm_shuffle_epi32(X1, 0x93);
  777. X2 = _mm_shuffle_epi32(X2, 0x4E);
  778. X3 = _mm_shuffle_epi32(X3, 0x39);
  779. T = _mm_add_epi32(X0, X1);
  780. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  781. T = _mm_add_epi32(X3, X0);
  782. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  783. T = _mm_add_epi32(X2, X3);
  784. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  785. T = _mm_add_epi32(X1, X2);
  786. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  787. X1 = _mm_shuffle_epi32(X1, 0x39);
  788. X2 = _mm_shuffle_epi32(X2, 0x4E);
  789. X3 = _mm_shuffle_epi32(X3, 0x93);
  790. // 2X round -------------------------------------------------------------
  791. T = _mm_add_epi32(X0, X3);
  792. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  793. T = _mm_add_epi32(X1, X0);
  794. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  795. T = _mm_add_epi32(X2, X1);
  796. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  797. T = _mm_add_epi32(X3, X2);
  798. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  799. X1 = _mm_shuffle_epi32(X1, 0x93);
  800. X2 = _mm_shuffle_epi32(X2, 0x4E);
  801. X3 = _mm_shuffle_epi32(X3, 0x39);
  802. T = _mm_add_epi32(X0, X1);
  803. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  804. T = _mm_add_epi32(X3, X0);
  805. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  806. T = _mm_add_epi32(X2, X3);
  807. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  808. T = _mm_add_epi32(X1, X2);
  809. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  810. X1 = _mm_shuffle_epi32(X1, 0x39);
  811. X2 = _mm_shuffle_epi32(X2, 0x4E);
  812. X3 = _mm_shuffle_epi32(X3, 0x93);
  813. // 2X round -------------------------------------------------------------
  814. T = _mm_add_epi32(X0, X3);
  815. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  816. T = _mm_add_epi32(X1, X0);
  817. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  818. T = _mm_add_epi32(X2, X1);
  819. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  820. T = _mm_add_epi32(X3, X2);
  821. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  822. X1 = _mm_shuffle_epi32(X1, 0x93);
  823. X2 = _mm_shuffle_epi32(X2, 0x4E);
  824. X3 = _mm_shuffle_epi32(X3, 0x39);
  825. T = _mm_add_epi32(X0, X1);
  826. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  827. T = _mm_add_epi32(X3, X0);
  828. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  829. T = _mm_add_epi32(X2, X3);
  830. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  831. T = _mm_add_epi32(X1, X2);
  832. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  833. X1 = _mm_shuffle_epi32(X1, 0x39);
  834. X2 = _mm_shuffle_epi32(X2, 0x4E);
  835. X3 = _mm_shuffle_epi32(X3, 0x93);
  836. // 2X round -------------------------------------------------------------
  837. T = _mm_add_epi32(X0, X3);
  838. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  839. T = _mm_add_epi32(X1, X0);
  840. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  841. T = _mm_add_epi32(X2, X1);
  842. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  843. T = _mm_add_epi32(X3, X2);
  844. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  845. X1 = _mm_shuffle_epi32(X1, 0x93);
  846. X2 = _mm_shuffle_epi32(X2, 0x4E);
  847. X3 = _mm_shuffle_epi32(X3, 0x39);
  848. T = _mm_add_epi32(X0, X1);
  849. X3 = _mm_xor_si128(_mm_xor_si128(X3, _mm_slli_epi32(T, 7)), _mm_srli_epi32(T, 25));
  850. T = _mm_add_epi32(X3, X0);
  851. X2 = _mm_xor_si128(_mm_xor_si128(X2, _mm_slli_epi32(T, 9)), _mm_srli_epi32(T, 23));
  852. T = _mm_add_epi32(X2, X3);
  853. X1 = _mm_xor_si128(_mm_xor_si128(X1, _mm_slli_epi32(T, 13)), _mm_srli_epi32(T, 19));
  854. T = _mm_add_epi32(X1, X2);
  855. X0 = _mm_xor_si128(_mm_xor_si128(X0, _mm_slli_epi32(T, 18)), _mm_srli_epi32(T, 14));
  856. X1 = _mm_shuffle_epi32(X1, 0x39);
  857. X2 = _mm_shuffle_epi32(X2, 0x4E);
  858. X3 = _mm_shuffle_epi32(X3, 0x93);
  859. X0 = _mm_add_epi32(X0s,X0);
  860. X1 = _mm_add_epi32(X1s,X1);
  861. X2 = _mm_add_epi32(X2s,X2);
  862. X3 = _mm_add_epi32(X3s,X3);
  863. __m128i k02 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X0, 32), _mm_srli_epi64(X3, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  864. __m128i k13 = _mm_shuffle_epi32(_mm_or_si128(_mm_slli_epi64(X1, 32), _mm_srli_epi64(X0, 32)), _MM_SHUFFLE(0, 1, 2, 3));
  865. __m128i k20 = _mm_or_si128(_mm_and_si128(X2, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X1, _S20SSECONSTANTS.maskHi32));
  866. __m128i k31 = _mm_or_si128(_mm_and_si128(X3, _S20SSECONSTANTS.maskLo32), _mm_and_si128(X2, _S20SSECONSTANTS.maskHi32));
  867. _mm_storeu_ps(reinterpret_cast<float *>(c),_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k02,k20),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m))))));
  868. _mm_storeu_ps(reinterpret_cast<float *>(c) + 4,_mm_castsi128_ps(_mm_xor_si128(_mm_unpackhi_epi64(k13,k31),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 4)))));
  869. _mm_storeu_ps(reinterpret_cast<float *>(c) + 8,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k20,k02),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 8)))));
  870. _mm_storeu_ps(reinterpret_cast<float *>(c) + 12,_mm_castsi128_ps(_mm_xor_si128(_mm_unpacklo_epi64(k31,k13),_mm_castps_si128(_mm_loadu_ps(reinterpret_cast<const float *>(m) + 12)))));
  871. if (!(++_state.i[8])) {
  872. ++_state.i[5]; // state reordered for SSE
  873. /* stopping at 2^70 bytes per nonce is user's responsibility */
  874. }
  875. #else
  876. x0 = j0;
  877. x1 = j1;
  878. x2 = j2;
  879. x3 = j3;
  880. x4 = j4;
  881. x5 = j5;
  882. x6 = j6;
  883. x7 = j7;
  884. x8 = j8;
  885. x9 = j9;
  886. x10 = j10;
  887. x11 = j11;
  888. x12 = j12;
  889. x13 = j13;
  890. x14 = j14;
  891. x15 = j15;
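// Note (added): ten double rounds (column round + row round) follow here,
// i.e. the 20 rounds of Salsa20/20.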
  892. // 2X round -------------------------------------------------------------
  893. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  894. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  895. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  896. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  897. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  898. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  899. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  900. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  901. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  902. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  903. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  904. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  905. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  906. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  907. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  908. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  909. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  910. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  911. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  912. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  913. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  914. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  915. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  916. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  917. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  918. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  919. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  920. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  921. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  922. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  923. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  924. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  925. // 2X round -------------------------------------------------------------
  926. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  927. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  928. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  929. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  930. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  931. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  932. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  933. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  934. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  935. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  936. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  937. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  938. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  939. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  940. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  941. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  942. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  943. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  944. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  945. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  946. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  947. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  948. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  949. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  950. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  951. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  952. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  953. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  954. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  955. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  956. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  957. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  958. // 2X round -------------------------------------------------------------
  959. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  960. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  961. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  962. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  963. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  964. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  965. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  966. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  967. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  968. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  969. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  970. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  971. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  972. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  973. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  974. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  975. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  976. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  977. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  978. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  979. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  980. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  981. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  982. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  983. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  984. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  985. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  986. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  987. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  988. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  989. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  990. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
  991. // 2X round -------------------------------------------------------------
  992. x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
  993. x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
  994. x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
  995. x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
  996. x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
  997. x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
  998. x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
  999. x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
  1000. x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
  1001. x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
  1002. x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
  1003. x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
  1004. x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
  1005. x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
  1006. x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
  1007. x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
  1008. x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
  1009. x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
  1010. x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
  1011. x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
  1012. x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
  1013. x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
  1014. x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
  1015. x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
  1016. x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
  1017. x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
  1018. x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
  1019. x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
  1020. x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
  1021. x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
  1022. x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
  1023. x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
		// 2X round -------------------------------------------------------------
		x4 = XOR( x4,ROTATE(PLUS( x0,x12), 7));
		x8 = XOR( x8,ROTATE(PLUS( x4, x0), 9));
		x12 = XOR(x12,ROTATE(PLUS( x8, x4),13));
		x0 = XOR( x0,ROTATE(PLUS(x12, x8),18));
		x9 = XOR( x9,ROTATE(PLUS( x5, x1), 7));
		x13 = XOR(x13,ROTATE(PLUS( x9, x5), 9));
		x1 = XOR( x1,ROTATE(PLUS(x13, x9),13));
		x5 = XOR( x5,ROTATE(PLUS( x1,x13),18));
		x14 = XOR(x14,ROTATE(PLUS(x10, x6), 7));
		x2 = XOR( x2,ROTATE(PLUS(x14,x10), 9));
		x6 = XOR( x6,ROTATE(PLUS( x2,x14),13));
		x10 = XOR(x10,ROTATE(PLUS( x6, x2),18));
		x3 = XOR( x3,ROTATE(PLUS(x15,x11), 7));
		x7 = XOR( x7,ROTATE(PLUS( x3,x15), 9));
		x11 = XOR(x11,ROTATE(PLUS( x7, x3),13));
		x15 = XOR(x15,ROTATE(PLUS(x11, x7),18));
		x1 = XOR( x1,ROTATE(PLUS( x0, x3), 7));
		x2 = XOR( x2,ROTATE(PLUS( x1, x0), 9));
		x3 = XOR( x3,ROTATE(PLUS( x2, x1),13));
		x0 = XOR( x0,ROTATE(PLUS( x3, x2),18));
		x6 = XOR( x6,ROTATE(PLUS( x5, x4), 7));
		x7 = XOR( x7,ROTATE(PLUS( x6, x5), 9));
		x4 = XOR( x4,ROTATE(PLUS( x7, x6),13));
		x5 = XOR( x5,ROTATE(PLUS( x4, x7),18));
		x11 = XOR(x11,ROTATE(PLUS(x10, x9), 7));
		x8 = XOR( x8,ROTATE(PLUS(x11,x10), 9));
		x9 = XOR( x9,ROTATE(PLUS( x8,x11),13));
		x10 = XOR(x10,ROTATE(PLUS( x9, x8),18));
		x12 = XOR(x12,ROTATE(PLUS(x15,x14), 7));
		x13 = XOR(x13,ROTATE(PLUS(x12,x15), 9));
		x14 = XOR(x14,ROTATE(PLUS(x13,x12),13));
		x15 = XOR(x15,ROTATE(PLUS(x14,x13),18));
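		// End of the unrolled double rounds. Feed-forward: add the original
		// input words back into the permuted state so the output is a one-way
		// function of the input block rather than a bare, invertible permutation.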
		x0 = PLUS(x0,j0);
		x1 = PLUS(x1,j1);
		x2 = PLUS(x2,j2);
		x3 = PLUS(x3,j3);
		x4 = PLUS(x4,j4);
		x5 = PLUS(x5,j5);
		x6 = PLUS(x6,j6);
		x7 = PLUS(x7,j7);
		x8 = PLUS(x8,j8);
		x9 = PLUS(x9,j9);
		x10 = PLUS(x10,j10);
		x11 = PLUS(x11,j11);
		x12 = PLUS(x12,j12);
		x13 = PLUS(x13,j13);
		x14 = PLUS(x14,j14);
		x15 = PLUS(x15,j15);
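		// Serialize each state word little-endian and XOR the resulting 64-byte
		// keystream block with the next 64 input bytes; decryption is the same
		// operation.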
		U32TO8_LITTLE(c + 0,XOR(x0,U8TO32_LITTLE(m + 0)));
		U32TO8_LITTLE(c + 4,XOR(x1,U8TO32_LITTLE(m + 4)));
		U32TO8_LITTLE(c + 8,XOR(x2,U8TO32_LITTLE(m + 8)));
		U32TO8_LITTLE(c + 12,XOR(x3,U8TO32_LITTLE(m + 12)));
		U32TO8_LITTLE(c + 16,XOR(x4,U8TO32_LITTLE(m + 16)));
		U32TO8_LITTLE(c + 20,XOR(x5,U8TO32_LITTLE(m + 20)));
		U32TO8_LITTLE(c + 24,XOR(x6,U8TO32_LITTLE(m + 24)));
		U32TO8_LITTLE(c + 28,XOR(x7,U8TO32_LITTLE(m + 28)));
		U32TO8_LITTLE(c + 32,XOR(x8,U8TO32_LITTLE(m + 32)));
		U32TO8_LITTLE(c + 36,XOR(x9,U8TO32_LITTLE(m + 36)));
		U32TO8_LITTLE(c + 40,XOR(x10,U8TO32_LITTLE(m + 40)));
		U32TO8_LITTLE(c + 44,XOR(x11,U8TO32_LITTLE(m + 44)));
		U32TO8_LITTLE(c + 48,XOR(x12,U8TO32_LITTLE(m + 48)));
		U32TO8_LITTLE(c + 52,XOR(x13,U8TO32_LITTLE(m + 52)));
		U32TO8_LITTLE(c + 56,XOR(x14,U8TO32_LITTLE(m + 56)));
		U32TO8_LITTLE(c + 60,XOR(x15,U8TO32_LITTLE(m + 60)));
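		// State words 8 and 9 hold the 64-bit little-endian block counter:
		// increment the low word and carry into the high word when it wraps.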
		if (!(++j8)) {
			++j9;
			/* stopping at 2^70 bytes per nonce is user's responsibility */
		}
#endif
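		// Last (possibly partial) block: when fewer than 64 bytes remain, the
		// block was generated into a temporary buffer, so copy only the bytes
		// actually requested to the real destination (ctarget). In the non-SSE
		// path the block counter was carried in the locals j8/j9 and must be
		// written back into _state before returning.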
		if (bytes <= 64) {
			if (bytes < 64) {
				for (i = 0;i < bytes;++i)
					ctarget[i] = c[i];
			}
#ifndef ZT_SALSA20_SSE
			_state.i[8] = j8;
			_state.i[9] = j9;
#endif
			return;
		}
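		// A full 64-byte block was produced and consumed; advance the output
		// and input pointers and continue with the next block.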
		bytes -= 64;
		c += 64;
		m += 64;
	}
}
} // namespace ZeroTier