// blake.odin — BLAKE (SHA-3 finalist) hash implementation.
  1. package blake
  2. /*
  3. Copyright 2021 zhibog
  4. Made available under the BSD-3 license.
  5. List of contributors:
  6. zhibog, dotbmp: Initial implementation.
  7. Implementation of the BLAKE hashing algorithm, as defined in <https://web.archive.org/web/20190915215948/https://131002.net/blake>
  8. */
  9. import "core:os"
  10. import "core:io"
  11. /*
  12. High level API
  13. */
// Digest sizes in bytes for each BLAKE variant.
DIGEST_SIZE_224 :: 28
DIGEST_SIZE_256 :: 32
DIGEST_SIZE_384 :: 48
DIGEST_SIZE_512 :: 64
  18. // hash_string_224 will hash the given input and return the
  19. // computed hash
  20. hash_string_224 :: proc "contextless" (data: string) -> [DIGEST_SIZE_224]byte {
  21. return hash_bytes_224(transmute([]byte)(data))
  22. }
  23. // hash_bytes_224 will hash the given input and return the
  24. // computed hash
  25. hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte {
  26. hash: [DIGEST_SIZE_224]byte
  27. ctx: Blake256_Context
  28. ctx.is224 = true
  29. init(&ctx)
  30. update(&ctx, data)
  31. final(&ctx, hash[:])
  32. return hash
  33. }
  34. // hash_string_to_buffer_224 will hash the given input and assign the
  35. // computed hash to the second parameter.
  36. // It requires that the destination buffer is at least as big as the digest size
  37. hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
  38. hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
  39. }
  40. // hash_bytes_to_buffer_224 will hash the given input and write the
  41. // computed hash into the second parameter.
  42. // It requires that the destination buffer is at least as big as the digest size
  43. hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
  44. assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
  45. ctx: Blake256_Context
  46. ctx.is224 = true
  47. init(&ctx)
  48. update(&ctx, data)
  49. final(&ctx, hash)
  50. }
  51. // hash_stream_224 will read the stream in chunks and compute a
  52. // hash from its contents
  53. hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
  54. hash: [DIGEST_SIZE_224]byte
  55. ctx: Blake256_Context
  56. ctx.is224 = true
  57. init(&ctx)
  58. buf := make([]byte, 512)
  59. defer delete(buf)
  60. read := 1
  61. for read > 0 {
  62. read, _ = io.read(s, buf)
  63. if read > 0 {
  64. update(&ctx, buf[:read])
  65. }
  66. }
  67. final(&ctx, hash[:])
  68. return hash, true
  69. }
  70. // hash_file_224 will read the file provided by the given handle
  71. // and compute a hash
  72. hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
  73. if !load_at_once {
  74. return hash_stream_224(os.stream_from_handle(hd))
  75. } else {
  76. if buf, ok := os.read_entire_file(hd); ok {
  77. return hash_bytes_224(buf[:]), ok
  78. }
  79. }
  80. return [DIGEST_SIZE_224]byte{}, false
  81. }
// hash_224 is a procedure group covering every BLAKE-224 entry point.
hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}
  90. // hash_string_256 will hash the given input and return the
  91. // computed hash
  92. hash_string_256 :: proc "contextless" (data: string) -> [DIGEST_SIZE_256]byte {
  93. return hash_bytes_256(transmute([]byte)(data))
  94. }
  95. // hash_bytes_256 will hash the given input and return the
  96. // computed hash
  97. hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte {
  98. hash: [DIGEST_SIZE_256]byte
  99. ctx: Blake256_Context
  100. ctx.is224 = false
  101. init(&ctx)
  102. update(&ctx, data)
  103. final(&ctx, hash[:])
  104. return hash
  105. }
  106. // hash_string_to_buffer_256 will hash the given input and assign the
  107. // computed hash to the second parameter.
  108. // It requires that the destination buffer is at least as big as the digest size
  109. hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
  110. hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
  111. }
  112. // hash_bytes_to_buffer_256 will hash the given input and write the
  113. // computed hash into the second parameter.
  114. // It requires that the destination buffer is at least as big as the digest size
  115. hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
  116. assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
  117. ctx: Blake256_Context
  118. ctx.is224 = false
  119. init(&ctx)
  120. update(&ctx, data)
  121. final(&ctx, hash)
  122. }
  123. // hash_stream_256 will read the stream in chunks and compute a
  124. // hash from its contents
  125. hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
  126. hash: [DIGEST_SIZE_256]byte
  127. ctx: Blake256_Context
  128. ctx.is224 = false
  129. init(&ctx)
  130. buf := make([]byte, 512)
  131. defer delete(buf)
  132. read := 1
  133. for read > 0 {
  134. read, _ = io.read(s, buf)
  135. if read > 0 {
  136. update(&ctx, buf[:read])
  137. }
  138. }
  139. final(&ctx, hash[:])
  140. return hash, true
  141. }
  142. // hash_file_256 will read the file provided by the given handle
  143. // and compute a hash
  144. hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
  145. if !load_at_once {
  146. return hash_stream_256(os.stream_from_handle(hd))
  147. } else {
  148. if buf, ok := os.read_entire_file(hd); ok {
  149. return hash_bytes_256(buf[:]), ok
  150. }
  151. }
  152. return [DIGEST_SIZE_256]byte{}, false
  153. }
// hash_256 is a procedure group covering every BLAKE-256 entry point.
hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}
  162. // hash_string_384 will hash the given input and return the
  163. // computed hash
  164. hash_string_384 :: proc "contextless" (data: string) -> [DIGEST_SIZE_384]byte {
  165. return hash_bytes_384(transmute([]byte)(data))
  166. }
  167. // hash_bytes_384 will hash the given input and return the
  168. // computed hash
  169. hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte {
  170. hash: [DIGEST_SIZE_384]byte
  171. ctx: Blake512_Context
  172. ctx.is384 = true
  173. init(&ctx)
  174. update(&ctx, data)
  175. final(&ctx, hash[:])
  176. return hash
  177. }
  178. // hash_string_to_buffer_384 will hash the given input and assign the
  179. // computed hash to the second parameter.
  180. // It requires that the destination buffer is at least as big as the digest size
  181. hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
  182. hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
  183. }
  184. // hash_bytes_to_buffer_384 will hash the given input and write the
  185. // computed hash into the second parameter.
  186. // It requires that the destination buffer is at least as big as the digest size
  187. hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
  188. assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
  189. ctx: Blake512_Context
  190. ctx.is384 = true
  191. init(&ctx)
  192. update(&ctx, data)
  193. final(&ctx, hash)
  194. }
  195. // hash_stream_384 will read the stream in chunks and compute a
  196. // hash from its contents
  197. hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
  198. hash: [DIGEST_SIZE_384]byte
  199. ctx: Blake512_Context
  200. ctx.is384 = true
  201. init(&ctx)
  202. buf := make([]byte, 512)
  203. defer delete(buf)
  204. read := 1
  205. for read > 0 {
  206. read, _ = io.read(s, buf)
  207. if read > 0 {
  208. update(&ctx, buf[:read])
  209. }
  210. }
  211. final(&ctx, hash[:])
  212. return hash, true
  213. }
  214. // hash_file_384 will read the file provided by the given handle
  215. // and compute a hash
  216. hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
  217. if !load_at_once {
  218. return hash_stream_384(os.stream_from_handle(hd))
  219. } else {
  220. if buf, ok := os.read_entire_file(hd); ok {
  221. return hash_bytes_384(buf[:]), ok
  222. }
  223. }
  224. return [DIGEST_SIZE_384]byte{}, false
  225. }
// hash_384 is a procedure group covering every BLAKE-384 entry point.
hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}
  234. // hash_string_512 will hash the given input and return the
  235. // computed hash
  236. hash_string_512 :: proc "contextless" (data: string) -> [DIGEST_SIZE_512]byte {
  237. return hash_bytes_512(transmute([]byte)(data))
  238. }
  239. // hash_bytes_512 will hash the given input and return the
  240. // computed hash
  241. hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte {
  242. hash: [DIGEST_SIZE_512]byte
  243. ctx: Blake512_Context
  244. ctx.is384 = false
  245. init(&ctx)
  246. update(&ctx, data)
  247. final(&ctx, hash[:])
  248. return hash
  249. }
  250. // hash_string_to_buffer_512 will hash the given input and assign the
  251. // computed hash to the second parameter.
  252. // It requires that the destination buffer is at least as big as the digest size
  253. hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
  254. hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
  255. }
  256. // hash_bytes_to_buffer_512 will hash the given input and write the
  257. // computed hash into the second parameter.
  258. // It requires that the destination buffer is at least as big as the digest size
  259. hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
  260. assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
  261. ctx: Blake512_Context
  262. ctx.is384 = false
  263. init(&ctx)
  264. update(&ctx, data)
  265. final(&ctx, hash)
  266. }
  267. // hash_stream_512 will read the stream in chunks and compute a
  268. // hash from its contents
  269. hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
  270. hash: [DIGEST_SIZE_512]byte
  271. ctx: Blake512_Context
  272. ctx.is384 = false
  273. init(&ctx)
  274. buf := make([]byte, 512)
  275. defer delete(buf)
  276. read := 1
  277. for read > 0 {
  278. read, _ = io.read(s, buf)
  279. if read > 0 {
  280. update(&ctx, buf[:read])
  281. }
  282. }
  283. final(&ctx, hash[:])
  284. return hash, true
  285. }
  286. // hash_file_512 will read the file provided by the given handle
  287. // and compute a hash
  288. hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
  289. if !load_at_once {
  290. return hash_stream_512(os.stream_from_handle(hd))
  291. } else {
  292. if buf, ok := os.read_entire_file(hd); ok {
  293. return hash_bytes_512(buf[:]), ok
  294. }
  295. }
  296. return [DIGEST_SIZE_512]byte{}, false
  297. }
// hash_512 is a procedure group covering every BLAKE-512 entry point.
hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}
  306. /*
  307. Low level API
  308. */
  309. init :: proc "contextless" (ctx: ^$T) {
  310. when T == Blake256_Context {
  311. if ctx.is224 {
  312. ctx.h[0] = 0xc1059ed8
  313. ctx.h[1] = 0x367cd507
  314. ctx.h[2] = 0x3070dd17
  315. ctx.h[3] = 0xf70e5939
  316. ctx.h[4] = 0xffc00b31
  317. ctx.h[5] = 0x68581511
  318. ctx.h[6] = 0x64f98fa7
  319. ctx.h[7] = 0xbefa4fa4
  320. } else {
  321. ctx.h[0] = 0x6a09e667
  322. ctx.h[1] = 0xbb67ae85
  323. ctx.h[2] = 0x3c6ef372
  324. ctx.h[3] = 0xa54ff53a
  325. ctx.h[4] = 0x510e527f
  326. ctx.h[5] = 0x9b05688c
  327. ctx.h[6] = 0x1f83d9ab
  328. ctx.h[7] = 0x5be0cd19
  329. }
  330. } else when T == Blake512_Context {
  331. if ctx.is384 {
  332. ctx.h[0] = 0xcbbb9d5dc1059ed8
  333. ctx.h[1] = 0x629a292a367cd507
  334. ctx.h[2] = 0x9159015a3070dd17
  335. ctx.h[3] = 0x152fecd8f70e5939
  336. ctx.h[4] = 0x67332667ffc00b31
  337. ctx.h[5] = 0x8eb44a8768581511
  338. ctx.h[6] = 0xdb0c2e0d64f98fa7
  339. ctx.h[7] = 0x47b5481dbefa4fa4
  340. } else {
  341. ctx.h[0] = 0x6a09e667f3bcc908
  342. ctx.h[1] = 0xbb67ae8584caa73b
  343. ctx.h[2] = 0x3c6ef372fe94f82b
  344. ctx.h[3] = 0xa54ff53a5f1d36f1
  345. ctx.h[4] = 0x510e527fade682d1
  346. ctx.h[5] = 0x9b05688c2b3e6c1f
  347. ctx.h[6] = 0x1f83d9abfb41bd6b
  348. ctx.h[7] = 0x5be0cd19137e2179
  349. }
  350. }
  351. }
// update absorbs message bytes into the hash state, buffering partial
// blocks in ctx.x and compressing full blocks as they become available.
update :: proc "contextless" (ctx: ^$T, data: []byte) {
	data := data
	when T == Blake256_Context {
		// Top up a previously buffered partial block first.
		if ctx.nx > 0 {
			n := copy(ctx.x[ctx.nx:], data)
			ctx.nx += n
			if ctx.nx == BLOCKSIZE_256 {
				block256(ctx, ctx.x[:])
				ctx.nx = 0
			}
			data = data[n:]
		}
		// Compress whole blocks directly from the input.
		if len(data) >= BLOCKSIZE_256 {
			n := len(data) &~ (BLOCKSIZE_256 - 1) // round down to a block multiple
			block256(ctx, data[:n])
			data = data[n:]
		}
		// Stash any tail for the next update/final call.
		if len(data) > 0 {
			ctx.nx = copy(ctx.x[:], data)
		}
	} else when T == Blake512_Context {
		// Top up a previously buffered partial block first.
		if ctx.nx > 0 {
			n := copy(ctx.x[ctx.nx:], data)
			ctx.nx += n
			if ctx.nx == BLOCKSIZE_512 {
				block512(ctx, ctx.x[:])
				ctx.nx = 0
			}
			data = data[n:]
		}
		// Compress whole blocks directly from the input.
		if len(data) >= BLOCKSIZE_512 {
			n := len(data) &~ (BLOCKSIZE_512 - 1) // round down to a block multiple
			block512(ctx, data[:n])
			data = data[n:]
		}
		// Stash any tail for the next update/final call.
		if len(data) > 0 {
			ctx.nx = copy(ctx.x[:], data)
		}
	}
}
  392. final :: proc "contextless" (ctx: ^$T, hash: []byte) {
  393. when T == Blake256_Context {
  394. tmp: [65]byte
  395. } else when T == Blake512_Context {
  396. tmp: [129]byte
  397. }
  398. nx := u64(ctx.nx)
  399. tmp[0] = 0x80
  400. length := (ctx.t + nx) << 3
  401. when T == Blake256_Context {
  402. if nx == 55 {
  403. if ctx.is224 {
  404. write_additional(ctx, {0x80})
  405. } else {
  406. write_additional(ctx, {0x81})
  407. }
  408. } else {
  409. if nx < 55 {
  410. if nx == 0 {
  411. ctx.nullt = true
  412. }
  413. write_additional(ctx, tmp[0 : 55 - nx])
  414. } else {
  415. write_additional(ctx, tmp[0 : 64 - nx])
  416. write_additional(ctx, tmp[1:56])
  417. ctx.nullt = true
  418. }
  419. if ctx.is224 {
  420. write_additional(ctx, {0x00})
  421. } else {
  422. write_additional(ctx, {0x01})
  423. }
  424. }
  425. for i : uint = 0; i < 8; i += 1 {
  426. tmp[i] = byte(length >> (56 - 8 * i))
  427. }
  428. write_additional(ctx, tmp[0:8])
  429. h := ctx.h[:]
  430. if ctx.is224 {
  431. h = h[0:7]
  432. }
  433. for s, i in h {
  434. hash[i * 4] = byte(s >> 24)
  435. hash[i * 4 + 1] = byte(s >> 16)
  436. hash[i * 4 + 2] = byte(s >> 8)
  437. hash[i * 4 + 3] = byte(s)
  438. }
  439. } else when T == Blake512_Context {
  440. if nx == 111 {
  441. if ctx.is384 {
  442. write_additional(ctx, {0x80})
  443. } else {
  444. write_additional(ctx, {0x81})
  445. }
  446. } else {
  447. if nx < 111 {
  448. if nx == 0 {
  449. ctx.nullt = true
  450. }
  451. write_additional(ctx, tmp[0 : 111 - nx])
  452. } else {
  453. write_additional(ctx, tmp[0 : 128 - nx])
  454. write_additional(ctx, tmp[1:112])
  455. ctx.nullt = true
  456. }
  457. if ctx.is384 {
  458. write_additional(ctx, {0x00})
  459. } else {
  460. write_additional(ctx, {0x01})
  461. }
  462. }
  463. for i : uint = 0; i < 16; i += 1 {
  464. tmp[i] = byte(length >> (120 - 8 * i))
  465. }
  466. write_additional(ctx, tmp[0:16])
  467. h := ctx.h[:]
  468. if ctx.is384 {
  469. h = h[0:6]
  470. }
  471. for s, i in h {
  472. hash[i * 8] = byte(s >> 56)
  473. hash[i * 8 + 1] = byte(s >> 48)
  474. hash[i * 8 + 2] = byte(s >> 40)
  475. hash[i * 8 + 3] = byte(s >> 32)
  476. hash[i * 8 + 4] = byte(s >> 24)
  477. hash[i * 8 + 5] = byte(s >> 16)
  478. hash[i * 8 + 6] = byte(s >> 8)
  479. hash[i * 8 + 7] = byte(s)
  480. }
  481. }
  482. }
// Digest sizes in bytes (aliases of the DIGEST_SIZE_* constants above).
SIZE_224 :: 28
SIZE_256 :: 32
SIZE_384 :: 48
SIZE_512 :: 64
// Compression-function block sizes in bytes: 64 for the 32-bit variants
// (224/256), 128 for the 64-bit variants (384/512).
BLOCKSIZE_256 :: 64
BLOCKSIZE_512 :: 128
// Blake256_Context is the streaming state for BLAKE-224/256.
Blake256_Context :: struct {
	h: [8]u32,   // chaining value
	s: [4]u32,   // salt words (zero when unsalted)
	t: u64,      // message bits compressed so far
	x: [64]byte, // buffer holding a partial block
	nx: int,     // number of buffered bytes in x
	is224: bool, // true selects the BLAKE-224 IV and 28-byte digest
	nullt: bool, // true when the final block carries no message bits
}
// Blake512_Context is the streaming state for BLAKE-384/512.
Blake512_Context :: struct {
	h: [8]u64,    // chaining value
	s: [4]u64,    // salt words (zero when unsalted)
	t: u64,       // message bits compressed so far
	x: [128]byte, // buffer holding a partial block
	nx: int,      // number of buffered bytes in x
	is384: bool,  // true selects the BLAKE-384 IV and 48-byte digest
	nullt: bool,  // true when the final block carries no message bits
}
// SIGMA holds the BLAKE message-word permutations: 10 rows of 16 indices,
// one per round, reused cyclically (round i uses row i % 10).
SIGMA := [?]int {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
	14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
	11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
	7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
	9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
	2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
	12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
	13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
	6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
	10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
}
// U256 are the BLAKE-256 round constants.
U256 := [16]u32 {
	0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
	0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
	0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
	0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
}
// U512 are the BLAKE-512 round constants.
U512 := [16]u64 {
	0x243f6a8885a308d3, 0x13198a2e03707344, 0xa4093822299f31d0, 0x082efa98ec4e6c89,
	0x452821e638d01377, 0xbe5466cf34e90c6c, 0xc0ac29b7c97c50dd, 0x3f84d5b5b5470917,
	0x9216d5d98979fb1b, 0xd1310ba698dfb5ac, 0x2ffd72dbd01adfb7, 0xb8e1afed6a267e96,
	0xba7c9045f12c7f99, 0x24a19947b3916cf7, 0x0801f2e2858efc16, 0x636920d871574e69,
}
// G256 is the BLAKE-256 G (quarter-round) function on four state words.
// i is the round number (selecting permutation row SIGMA[i % 10]); j is
// the column/diagonal index (0..7). `~` is Odin's XOR; the paired shifts
// implement 32-bit right-rotations by 16, 12, 8 and 7.
G256 :: #force_inline proc "contextless" (a, b, c, d: u32, m: [16]u32, i, j: int) -> (u32, u32, u32, u32) {
	a, b, c, d := a, b, c, d
	a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
	a += b
	d ~= a
	d = d << (32 - 16) | d >> 16 // rotr 16
	c += d
	b ~= c
	b = b << (32 - 12) | b >> 12 // rotr 12
	a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j)]]
	a += b
	d ~= a
	d = d << (32 - 8) | d >> 8 // rotr 8
	c += d
	b ~= c
	b = b << (32 - 7) | b >> 7 // rotr 7
	return a, b, c, d
}
// G512 is the BLAKE-512 G (quarter-round) function on four state words.
// i is the round number (selecting permutation row SIGMA[i % 10]); j is
// the column/diagonal index (0..7). `~` is Odin's XOR; the paired shifts
// implement 64-bit right-rotations by 32, 25, 16 and 11.
G512 :: #force_inline proc "contextless" (a, b, c, d: u64, m: [16]u64, i, j: int) -> (u64, u64, u64, u64) {
	a, b, c, d := a, b, c, d
	a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
	a += b
	d ~= a
	d = d << (64 - 32) | d >> 32 // rotr 32
	c += d
	b ~= c
	b = b << (64 - 25) | b >> 25 // rotr 25
	a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j)]]
	a += b
	d ~= a
	d = d << (64 - 16) | d >> 16 // rotr 16
	c += d
	b ~= c
	b = b << (64 - 11) | b >> 11 // rotr 11
	return a, b, c, d
}
// block256 compresses whole 64-byte blocks of p into the BLAKE-256 state,
// advancing the bit counter by 512 per block. Callers pass a multiple of
// BLOCKSIZE_256 bytes.
block256 :: proc "contextless" (ctx: ^Blake256_Context, p: []byte) #no_bounds_check {
	i, j: int = ---, ---
	v, m: [16]u32 = ---, --- // working state and message words, overwritten per block
	p := p
	for len(p) >= BLOCKSIZE_256 {
		// Initialize the 16-word working state: chaining value, then
		// salt-masked constants, then counter-mixed constants.
		v[0] = ctx.h[0]
		v[1] = ctx.h[1]
		v[2] = ctx.h[2]
		v[3] = ctx.h[3]
		v[4] = ctx.h[4]
		v[5] = ctx.h[5]
		v[6] = ctx.h[6]
		v[7] = ctx.h[7]
		v[8] = ctx.s[0] ~ U256[0]
		v[9] = ctx.s[1] ~ U256[1]
		v[10] = ctx.s[2] ~ U256[2]
		v[11] = ctx.s[3] ~ U256[3]
		v[12] = U256[4]
		v[13] = U256[5]
		v[14] = U256[6]
		v[15] = U256[7]
		ctx.t += 512 // this block's 512 message bits
		if !ctx.nullt {
			// XOR in the 64-bit bit counter (low word into v12/v13,
			// high word into v14/v15); skipped for padding-only blocks.
			v[12] ~= u32(ctx.t)
			v[13] ~= u32(ctx.t)
			v[14] ~= u32(ctx.t >> 32)
			v[15] ~= u32(ctx.t >> 32)
		}
		// Load the block as 16 big-endian 32-bit message words.
		for i, j = 0, 0; i < 16; i, j = i+1, j+4 {
			m[i] = u32(p[j]) << 24 | u32(p[j + 1]) << 16 | u32(p[j + 2]) << 8 | u32(p[j + 3])
		}
		// 14 rounds: four column steps, then four diagonal steps.
		for i = 0; i < 14; i += 1 {
			v[0], v[4], v[8], v[12] = G256(v[0], v[4], v[8], v[12], m, i, 0)
			v[1], v[5], v[9], v[13] = G256(v[1], v[5], v[9], v[13], m, i, 1)
			v[2], v[6], v[10], v[14] = G256(v[2], v[6], v[10], v[14], m, i, 2)
			v[3], v[7], v[11], v[15] = G256(v[3], v[7], v[11], v[15], m, i, 3)
			v[0], v[5], v[10], v[15] = G256(v[0], v[5], v[10], v[15], m, i, 4)
			v[1], v[6], v[11], v[12] = G256(v[1], v[6], v[11], v[12], m, i, 5)
			v[2], v[7], v[8], v[13] = G256(v[2], v[7], v[8], v[13], m, i, 6)
			v[3], v[4], v[9], v[14] = G256(v[3], v[4], v[9], v[14], m, i, 7)
		}
		// Finalization: fold working state and salt back into h.
		for i = 0; i < 8; i += 1 {
			ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
		}
		p = p[BLOCKSIZE_256:]
	}
}
// block512 compresses whole 128-byte blocks of p into the BLAKE-512 state,
// advancing the bit counter by 1024 per block. Callers pass a multiple of
// BLOCKSIZE_512 bytes.
block512 :: proc "contextless" (ctx: ^Blake512_Context, p: []byte) #no_bounds_check {
	i, j: int = ---, ---
	v, m: [16]u64 = ---, --- // working state and message words, overwritten per block
	p := p
	for len(p) >= BLOCKSIZE_512 {
		// Initialize the 16-word working state: chaining value, then
		// salt-masked constants, then counter-mixed constants.
		v[0] = ctx.h[0]
		v[1] = ctx.h[1]
		v[2] = ctx.h[2]
		v[3] = ctx.h[3]
		v[4] = ctx.h[4]
		v[5] = ctx.h[5]
		v[6] = ctx.h[6]
		v[7] = ctx.h[7]
		v[8] = ctx.s[0] ~ U512[0]
		v[9] = ctx.s[1] ~ U512[1]
		v[10] = ctx.s[2] ~ U512[2]
		v[11] = ctx.s[3] ~ U512[3]
		v[12] = U512[4]
		v[13] = U512[5]
		v[14] = U512[6]
		v[15] = U512[7]
		ctx.t += 1024 // this block's 1024 message bits
		if !ctx.nullt {
			// XOR in the 128-bit counter: low 64 bits into v12/v13; the
			// high 64 bits (always zero with a u64 counter) into v14/v15.
			// Skipped for padding-only blocks.
			v[12] ~= ctx.t
			v[13] ~= ctx.t
			v[14] ~= 0
			v[15] ~= 0
		}
		// Load the block as 16 big-endian 64-bit message words.
		for i, j = 0, 0; i < 16; i, j = i + 1, j + 8 {
			m[i] = u64(p[j]) << 56 | u64(p[j + 1]) << 48 | u64(p[j + 2]) << 40 | u64(p[j + 3]) << 32 |
			u64(p[j + 4]) << 24 | u64(p[j + 5]) << 16 | u64(p[j + 6]) << 8 | u64(p[j + 7])
		}
		// 16 rounds: four column steps, then four diagonal steps.
		for i = 0; i < 16; i += 1 {
			v[0], v[4], v[8], v[12] = G512(v[0], v[4], v[8], v[12], m, i, 0)
			v[1], v[5], v[9], v[13] = G512(v[1], v[5], v[9], v[13], m, i, 1)
			v[2], v[6], v[10], v[14] = G512(v[2], v[6], v[10], v[14], m, i, 2)
			v[3], v[7], v[11], v[15] = G512(v[3], v[7], v[11], v[15], m, i, 3)
			v[0], v[5], v[10], v[15] = G512(v[0], v[5], v[10], v[15], m, i, 4)
			v[1], v[6], v[11], v[12] = G512(v[1], v[6], v[11], v[12], m, i, 5)
			v[2], v[7], v[8], v[13] = G512(v[2], v[7], v[8], v[13], m, i, 6)
			v[3], v[4], v[9], v[14] = G512(v[3], v[4], v[9], v[14], m, i, 7)
		}
		// Finalization: fold working state and salt back into h.
		for i = 0; i < 8; i += 1 {
			ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
		}
		p = p[BLOCKSIZE_512:]
	}
}
// write_additional feeds padding/length bytes through update without
// counting them toward the message bit length: ctx.t is pre-decremented
// by their bit count so the compression function's counter increment
// nets to zero for these bytes.
write_additional :: proc "contextless" (ctx: ^$T, data: []byte) {
	ctx.t -= u64(len(data)) << 3
	update(ctx, data)
}