
Merge branch 'odin-lang:master' into master

Michael Kutowski 1 year ago
parent commit 2af2a035dc
62 changed files with 3138 additions and 13071 deletions
  1. .gitignore (+22 -0)
  2. core/crypto/README.md (+41 -50)
  3. core/crypto/_blake2/blake2.odin (+134 -62)
  4. core/crypto/_fiat/field_poly1305/field.odin (+20 -33)
  5. core/crypto/_sha3/sha3.odin (+152 -138)
  6. core/crypto/_tiger/tiger.odin (+0 -410)
  7. core/crypto/blake/blake.odin (+0 -726)
  8. core/crypto/blake2b/blake2b.odin (+61 -61)
  9. core/crypto/blake2s/blake2s.odin (+61 -61)
  10. core/crypto/chacha20/chacha20.odin (+135 -180)
  11. core/crypto/chacha20poly1305/chacha20poly1305.odin (+5 -5)
  12. core/crypto/gost/gost.odin (+0 -382)
  13. core/crypto/groestl/groestl.odin (+0 -653)
  14. core/crypto/haval/haval.odin (+0 -1814)
  15. core/crypto/jh/jh.odin (+0 -584)
  16. core/crypto/keccak/keccak.odin (+0 -374)
  17. core/crypto/legacy/README.md (+10 -0)
  18. core/crypto/legacy/keccak/keccak.odin (+377 -0)
  19. core/crypto/legacy/md5/md5.odin (+295 -0)
  20. core/crypto/legacy/sha1/sha1.odin (+252 -0)
  21. core/crypto/md2/md2.odin (+0 -182)
  22. core/crypto/md4/md4.odin (+0 -263)
  23. core/crypto/md5/md5.odin (+0 -285)
  24. core/crypto/poly1305/poly1305.odin (+4 -4)
  25. core/crypto/ripemd/ripemd.odin (+0 -919)
  26. core/crypto/sha1/sha1.odin (+0 -246)
  27. core/crypto/sha2/sha2.odin (+529 -409)
  28. core/crypto/sha3/sha3.odin (+188 -184)
  29. core/crypto/shake/shake.odin (+102 -103)
  30. core/crypto/siphash/siphash.odin (+209 -181)
  31. core/crypto/sm3/sm3.odin (+204 -175)
  32. core/crypto/streebog/streebog.odin (+0 -517)
  33. core/crypto/tiger/tiger.odin (+0 -280)
  34. core/crypto/tiger2/tiger2.odin (+0 -280)
  35. core/crypto/util/util.odin (+0 -146)
  36. core/crypto/whirlpool/whirlpool.odin (+0 -806)
  37. core/os/file_windows.odin (+16 -16)
  38. core/os/stream.odin (+6 -3)
  39. examples/all/all_main.odin (+3 -29)
  40. examples/all/all_vendor.odin (+3 -17)
  41. src/string.cpp (+1 -1)
  42. tests/core/crypto/test_core_crypto.odin (+19 -675)
  43. tests/vendor/botan/test_vendor_botan.odin (+3 -222)
  44. vendor/botan/README.md (+27 -28)
  45. vendor/botan/bindings/botan.odin (+0 -14)
  46. vendor/botan/blake2b/blake2b.odin (+18 -21)
  47. vendor/botan/gost/gost.odin (+0 -121)
  48. vendor/botan/legacy/README.md (+10 -0)
  49. vendor/botan/legacy/keccak/keccak.odin (+19 -22)
  50. vendor/botan/legacy/md5/md5.odin (+19 -22)
  51. vendor/botan/legacy/sha1/sha1.odin (+19 -22)
  52. vendor/botan/md4/md4.odin (+0 -121)
  53. vendor/botan/ripemd/ripemd.odin (+0 -121)
  54. vendor/botan/sha2/sha2.odin (+60 -72)
  55. vendor/botan/sha3/sha3.odin (+60 -72)
  56. vendor/botan/shake/shake.odin (+32 -38)
  57. vendor/botan/siphash/siphash.odin (+4 -4)
  58. vendor/botan/skein512/skein512.odin (+0 -286)
  59. vendor/botan/sm3/sm3.odin (+18 -21)
  60. vendor/botan/streebog/streebog.odin (+0 -204)
  61. vendor/botan/tiger/tiger.odin (+0 -285)
  62. vendor/botan/whirlpool/whirlpool.odin (+0 -121)

+ 22 - 0
.gitignore

@@ -25,7 +25,29 @@ bld/
 tests/documentation/verify/
 tests/documentation/all.odin-doc
 tests/internal/test_map
+tests/internal/test_pow
 tests/internal/test_rtti
+tests/core/test_core_compress
+tests/core/test_core_filepath
+tests/core/test_core_fmt
+tests/core/test_core_i18n
+tests/core/test_core_image
+tests/core/test_core_libc
+tests/core/test_core_match
+tests/core/test_core_math
+tests/core/test_core_net
+tests/core/test_core_os_exit
+tests/core/test_core_reflect
+tests/core/test_core_strings
+tests/core/test_crypto_hash
+tests/core/test_hash
+tests/core/test_hxa
+tests/core/test_json
+tests/core/test_linalg_glsl_math
+tests/core/test_noise
+tests/core/test_varint
+tests/core/test_xml
+tests/vendor/vendor_botan
 # Visual Studio 2015 cache/options directory
 .vs/
 # Visual Studio Code options directory

+ 41 - 50
core/crypto/README.md

@@ -1,95 +1,86 @@
 # crypto
-A crypto library for the Odin language
+
+A cryptography library for the Odin language
 
 ## Supported
+
 This library offers various algorithms implemented in Odin.
-Please see the chart below for the options.  
+Please see the chart below for some of the options.
 
 ## Hashing algorithms
+
 | Algorithm                                                                                                    |                  |
 |:-------------------------------------------------------------------------------------------------------------|:-----------------|
-| [BLAKE](https://web.archive.org/web/20190915215948/https://131002.net/blake)                                 | ✔️ |
 | [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                                                     | ✔️ |
 | [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693)                                                     | ✔️ |
-| [GOST](https://datatracker.ietf.org/doc/html/rfc5831)                                                        | ✔️ |
-| [Grøstl](http://www.groestl.info/Groestl.zip)                                                                | ✔️ |
-| [HAVAL](https://web.archive.org/web/20150111210116/http://labs.calyptix.com/haval.php)                       | ✔️ |
-| [JH](https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html)                                               | ✔️ |
-| [Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                           | ✔️ |
-| [MD2](https://datatracker.ietf.org/doc/html/rfc1319)                                                         | ✔️ |
-| [MD4](https://datatracker.ietf.org/doc/html/rfc1320)                                                         | ✔️ |
-| [MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                         | ✔️ |
-| [RIPEMD](https://homes.esat.kuleuven.be/~bosselae/ripemd160.html)                                            | ✔️ |
-| [SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                       | ✔️ |
 | [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | ✔️ |
 | [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | ✔️ |
 | [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | ✔️ |
 | [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)                                           | ✔️ |
-| [Streebog](https://datatracker.ietf.org/doc/html/rfc6986)                                                    | ✔️ |
-| [Tiger](https://www.cs.technion.ac.il/~biham/Reports/Tiger/)                                                 | ✔️ |
-| [Tiger2](https://www.cs.technion.ac.il/~biham/Reports/Tiger/)                                                | ✔️ |
-| [Whirlpool](https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html)  | ✔️ |
+| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                    | ✔️ |
+| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                  | ✔️ |
+| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                | ✔️ |
 
 #### High level API
-Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*.  
+
+Each hash algorithm contains a procedure group named `hash`, or, if the algorithm provides more than one digest size, `hash_<size>`\*.
 Included in these groups are six procedures.
-* `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
-* `hash_bytes` - Hash a given byte slice and return the computed hash
-* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
-* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
-* `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
-* `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)
+- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
+- `hash_bytes` - Hash a given byte slice and return the computed hash
+- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
+- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
+- `hash_stream` - Takes an `io.Stream` and returns the computed hash from it
+- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls whether the file is streamed (the default) or read at once (set to true)
 
-\* On some algorithms there is another part to the name, since they might offer control about additional parameters.  
-For instance, `HAVAL` offers different sizes as well as three different round amounts.  
-Computing a 256-bit hash with 3 rounds is therefore achieved by calling `haval.hash_256_3(...)`.
+\* On some algorithms there is another part to the name, since they may offer control over additional parameters.
+For instance, `SHA-2` offers different digest sizes.
+Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`.
 
 #### Low level API
+
 The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
 You may also directly call them, if you wish.
 
 #### Example
+
 ```odin
 package crypto_example
 
 // Import the desired package
-import "core:crypto/md4"
+import "core:crypto/blake2b"
 
 main :: proc() {
     input := "foo"
 
     // Compute the hash, using the high level API
-    computed_hash := md4.hash(input)
+    computed_hash := blake2b.hash(input)
 
     // Variant that takes a destination buffer, instead of returning the computed hash
-    hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
-    md4.hash(input, hash[:])
+    hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
+    blake2b.hash(input, hash[:])
 
     // Compute the hash, using the low level API
-    ctx: md4.Md4_Context
-    computed_hash_low: [16]byte
-    md4.init(&ctx)
-    md4.update(&ctx, transmute([]byte)input)
-    md4.final(&ctx, computed_hash_low[:])
+    ctx: blake2b.Context
+    computed_hash_low: [blake2b.DIGEST_SIZE]byte
+    blake2b.init(&ctx)
+    blake2b.update(&ctx, transmute([]byte)input)
+    blake2b.final(&ctx, computed_hash_low[:])
 }
 ```
 For example uses of all available algorithms, please see the tests within `tests/core/crypto`.
 
-#### Thread safety
-The crypto package is not thread-safe at the moment. This may change in the future.
+## Implementation considerations
 
-### Disclaimer
-The algorithms were ported out of curiosity and due to interest in the field.
-We have not had any of the code verified by a third party or tested/fuzzed by any automatic means.
-Wherever we were able to find official test vectors, those were used to verify the implementation.
-We do not recommend using them in a production environment, without any additional testing and/or verification.
+- The crypto packages are not thread-safe.
+- A best effort is made to mitigate timing side-channels on reasonable
+  architectures. Architectures that are known to be unreasonable include
+  but are not limited to i386, i486, and WebAssembly.
+- Some but not all of the packages attempt to sanitize sensitive data;
+  however, this is not done consistently throughout the library at the moment.
+  As Thomas Pornin puts it, "In general, such memory cleansing is a fool's
+  quest."
+- None of these packages have received independent third-party review.
 
-### ToDo
-* Ciphers (Symmetric, Asymmetric)
-* MACs (Message Authentication Code)
-* CSPRNGs (Cryptographically Secure PseudoRandom Number Generator)
-* KDFs (Key Derivation Function)
-* KEAs (Key Exchange Algorithm)
+## License
 
-### License
 This library is made available under the BSD-3 license.
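
A minimal sketch of the named members of the `hash` procedure group from the list above, and of the relocated `legacy/` import path this commit introduces; it assumes the `blake2b` and `legacy/md5` packages expose the procedures exactly as the README lists them:

```odin
package crypto_api_example

import "core:crypto/blake2b"
import "core:crypto/legacy/md5"

main :: proc() {
	input := "foo"

	// Named members behind the `hash` procedure group; the destination
	// buffer must be at least DIGEST_SIZE bytes.
	buf := make([]byte, blake2b.DIGEST_SIZE)
	defer delete(buf)
	blake2b.hash_string_to_buffer(input, buf)
	blake2b.hash_bytes_to_buffer(transmute([]byte)input, buf)

	// Deprecated digests now live under legacy/ (see the file list above).
	digest := md5.hash(input)
	_ = digest
}
```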

+ 134 - 62
core/crypto/_blake2/blake2.odin

@@ -10,12 +10,12 @@ package _blake2
     Implementation of the BLAKE2 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/rfc7693> and <https://www.blake2.net/>
 */
 
-import "../util"
+import "core:encoding/endian"
 
-BLAKE2S_BLOCK_SIZE  :: 64
-BLAKE2S_SIZE        :: 32
-BLAKE2B_BLOCK_SIZE  :: 128
-BLAKE2B_SIZE        :: 64
+BLAKE2S_BLOCK_SIZE :: 64
+BLAKE2S_SIZE :: 32
+BLAKE2B_BLOCK_SIZE :: 128
+BLAKE2B_SIZE :: 64
 
 Blake2s_Context :: struct {
 	h:            [8]u32,
@@ -28,7 +28,9 @@ Blake2s_Context :: struct {
 	is_keyed:     bool,
 	size:         byte,
 	is_last_node: bool,
-	cfg:		  Blake2_Config,
+	cfg:          Blake2_Config,
+
+	is_initialized: bool,
 }
 
 Blake2b_Context :: struct {
@@ -42,15 +44,19 @@ Blake2b_Context :: struct {
 	is_keyed:     bool,
 	size:         byte,
 	is_last_node: bool,
-	cfg:		  Blake2_Config,
+	cfg:          Blake2_Config,
+
+	is_initialized: bool,
 }
 
 Blake2_Config :: struct {
-    size:   byte,
-	key:    []byte, 
-	salt:   []byte, 
+	size:   byte,
+	key:    []byte,
+	salt:   []byte,
 	person: []byte,
-	tree:   union{Blake2_Tree},
+	tree:   union {
+		Blake2_Tree,
+	},
 }
 
 Blake2_Tree :: struct {
@@ -63,11 +69,13 @@ Blake2_Tree :: struct {
 	is_last_node:    bool,
 }
 
+@(private)
 BLAKE2S_IV := [8]u32 {
 	0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
 	0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
 }
 
+@(private)
 BLAKE2B_IV := [8]u64 {
 	0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
 	0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
@@ -78,8 +86,14 @@ BLAKE2B_IV := [8]u64 {
 init :: proc(ctx: ^$T) {
 	when T == Blake2s_Context {
 		block_size :: BLAKE2S_BLOCK_SIZE
+		max_size :: BLAKE2S_SIZE
 	} else when T == Blake2b_Context {
 		block_size :: BLAKE2B_BLOCK_SIZE
+		max_size :: BLAKE2B_SIZE
 	}
+
+	if ctx.cfg.size > max_size {
+		panic("blake2: requested output size exceeds algorithm max")
+	}
 
 	p := make([]byte, block_size)
@@ -106,10 +120,10 @@ init :: proc(ctx: ^$T) {
 	if ctx.cfg.tree != nil {
 		p[2] = ctx.cfg.tree.(Blake2_Tree).fanout
 		p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth
-		util.PUT_U32_LE(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
+		endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
 		when T == Blake2s_Context {
-			p[8]  = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
-			p[9]  = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
+			p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
+			p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
 			p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16)
 			p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24)
 			p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32)
@@ -117,7 +131,7 @@ init :: proc(ctx: ^$T) {
 			p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth
 			p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
 		} else when T == Blake2b_Context {
-			util.PUT_U64_LE(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
+			endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
 			p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth
 			p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
 		}
@@ -127,10 +141,10 @@ init :: proc(ctx: ^$T) {
 	ctx.size = ctx.cfg.size
 	for i := 0; i < 8; i += 1 {
 		when T == Blake2s_Context {
-			ctx.h[i] = BLAKE2S_IV[i] ~ util.U32_LE(p[i * 4:])
+			ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:])
 		}
 		when T == Blake2b_Context {
-			ctx.h[i] = BLAKE2B_IV[i] ~ util.U64_LE(p[i * 8:])
+			ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:])
 		}
 	}
 	if ctx.cfg.tree != nil && ctx.cfg.tree.(Blake2_Tree).is_last_node {
@@ -142,13 +156,19 @@ init :: proc(ctx: ^$T) {
 		ctx.is_keyed = true
 	}
 	copy(ctx.ih[:], ctx.h[:])
-	copy(ctx.h[:],  ctx.ih[:])
+	copy(ctx.h[:], ctx.ih[:])
 	if ctx.is_keyed {
 		update(ctx, ctx.padded_key[:])
 	}
+
+	ctx.nx = 0
+
+	ctx.is_initialized = true
 }
 
-update :: proc "contextless" (ctx: ^$T, p: []byte) {
+update :: proc(ctx: ^$T, p: []byte) {
+	assert(ctx.is_initialized)
+
 	p := p
 	when T == Blake2s_Context {
 		block_size :: BLAKE2S_BLOCK_SIZE
@@ -174,15 +194,25 @@ update :: proc "contextless" (ctx: ^$T, p: []byte) {
 	ctx.nx += copy(ctx.x[ctx.nx:], p)
 }
 
-final :: proc "contextless" (ctx: ^$T, hash: []byte) {
+final :: proc(ctx: ^$T, hash: []byte) {
+	assert(ctx.is_initialized)
+
 	when T == Blake2s_Context {
+		if len(hash) < int(ctx.cfg.size) {
+			panic("crypto/blake2s: invalid destination digest size")
+		}
 		blake2s_final(ctx, hash)
-	}
-	when T == Blake2b_Context {
+	} else when T == Blake2b_Context {
+		if len(hash) < int(ctx.cfg.size) {
+			panic("crypto/blake2b: invalid destination digest size")
+		}
 		blake2b_final(ctx, hash)
 	}
+
+	ctx.is_initialized = false
}
 
+@(private)
 blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) {
 	if ctx.is_keyed {
 		for i := 0; i < len(ctx.padded_key); i += 1 {
@@ -203,16 +233,14 @@ blake2s_final :: proc "contextless" (ctx: ^Blake2s_Context, hash: []byte) {
 
 	blocks(ctx, ctx.x[:])
 
-	j := 0
-	for s, _ in ctx.h[:(ctx.size - 1) / 4 + 1] {
-		hash[j + 0] = byte(s >> 0)
-		hash[j + 1] = byte(s >> 8)
-		hash[j + 2] = byte(s >> 16)
-		hash[j + 3] = byte(s >> 24)
-		j += 4
+	dst: [BLAKE2S_SIZE]byte
+	for i := 0; i < BLAKE2S_SIZE / 4; i += 1 {
+		endian.unchecked_put_u32le(dst[i * 4:], ctx.h[i])
 	}
+	copy(hash, dst[:])
 }
 
+@(private)
 blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) {
 	if ctx.is_keyed {
 		for i := 0; i < len(ctx.padded_key); i += 1 {
@@ -229,56 +257,52 @@ blake2b_final :: proc "contextless" (ctx: ^Blake2b_Context, hash: []byte) {
 	ctx.f[0] = 0xffffffffffffffff
 	if ctx.is_last_node {
 		ctx.f[1] = 0xffffffffffffffff
-	} 
+	}
 
 	blocks(ctx, ctx.x[:])
 
-	j := 0
-	for s, _ in ctx.h[:(ctx.size - 1) / 8 + 1] {
-		hash[j + 0] = byte(s >> 0)
-		hash[j + 1] = byte(s >> 8)
-		hash[j + 2] = byte(s >> 16)
-		hash[j + 3] = byte(s >> 24)
-		hash[j + 4] = byte(s >> 32)
-		hash[j + 5] = byte(s >> 40)
-		hash[j + 6] = byte(s >> 48)
-		hash[j + 7] = byte(s >> 56)
-		j += 8
+	dst: [BLAKE2B_SIZE]byte
+	for i := 0; i < BLAKE2B_SIZE / 8; i += 1 {
+		endian.unchecked_put_u64le(dst[i * 8:], ctx.h[i])
 	}
+	copy(hash, dst[:])
 }
 
+@(private)
 blocks :: proc "contextless" (ctx: ^$T, p: []byte) {
 	when T == Blake2s_Context {
 		blake2s_blocks(ctx, p)
-	}
-	when T == Blake2b_Context {
+	} else when T == Blake2b_Context {
 		blake2b_blocks(ctx, p)
 	}
 }
 
+@(private)
 blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []byte) {
-	h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
+	h0, h1, h2, h3, h4, h5, h6, h7 :=
+		ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
 	p := p
 	for len(p) >= BLAKE2S_BLOCK_SIZE {
 		ctx.t[0] += BLAKE2S_BLOCK_SIZE
 		if ctx.t[0] < BLAKE2S_BLOCK_SIZE {
 			ctx.t[1] += 1
-		} 
+		}
 		v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7
-		v8  := BLAKE2S_IV[0]
-		v9  := BLAKE2S_IV[1]
+		v8 := BLAKE2S_IV[0]
+		v9 := BLAKE2S_IV[1]
 		v10 := BLAKE2S_IV[2]
 		v11 := BLAKE2S_IV[3]
 		v12 := BLAKE2S_IV[4] ~ ctx.t[0]
 		v13 := BLAKE2S_IV[5] ~ ctx.t[1]
 		v14 := BLAKE2S_IV[6] ~ ctx.f[0]
 		v15 := BLAKE2S_IV[7] ~ ctx.f[1]
-		m: [16]u32
-		j := 0
+
+		m: [16]u32 = ---
 		for i := 0; i < 16; i += 1 {
-			m[i] = u32(p[j]) | u32(p[j + 1]) << 8 | u32(p[j + 2]) << 16 | u32(p[j + 3]) << 24
-			j += 4
+			m[i] = endian.unchecked_get_u32le(p[i * 4:])
 		}
+
+		// Round 1
 		v0 += m[0]
 		v0 += v4
 		v12 ~= v0
@@ -391,6 +415,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 2
 		v0 += m[14]
 		v0 += v4
 		v12 ~= v0
@@ -503,6 +529,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 3
 		v0 += m[11]
 		v0 += v4
 		v12 ~= v0
@@ -615,6 +643,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 4
 		v0 += m[7]
 		v0 += v4
 		v12 ~= v0
@@ -727,6 +757,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 5
 		v0 += m[9]
 		v0 += v4
 		v12 ~= v0
@@ -839,6 +871,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 6
 		v0 += m[2]
 		v0 += v4
 		v12 ~= v0
@@ -951,6 +985,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 7
 		v0 += m[12]
 		v0 += v4
 		v12 ~= v0
@@ -1063,6 +1099,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 8
 		v0 += m[13]
 		v0 += v4
 		v12 ~= v0
@@ -1175,6 +1213,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 9
 		v0 += m[6]
 		v0 += v4
 		v12 ~= v0
@@ -1287,6 +1327,8 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
+		// Round 10
 		v0 += m[10]
 		v0 += v4
 		v12 ~= v0
@@ -1399,6 +1441,7 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (32 - 7) | v5 >> 7
+
 		h0 ~= v0 ~ v8
 		h1 ~= v1 ~ v9
 		h2 ~= v2 ~ v10
@@ -1407,19 +1450,23 @@ blake2s_blocks :: #force_inline proc "contextless" (ctx: ^Blake2s_Context, p: []
 		h5 ~= v5 ~ v13
 		h6 ~= v6 ~ v14
 		h7 ~= v7 ~ v15
+
 		p = p[BLAKE2S_BLOCK_SIZE:]
 	}
-	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7
+	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] =
+		h0, h1, h2, h3, h4, h5, h6, h7
 }
 
+@(private)
 blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []byte) {
-	h0, h1, h2, h3, h4, h5, h6, h7 := ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
+	h0, h1, h2, h3, h4, h5, h6, h7 :=
		ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7]
 	p := p
 	for len(p) >= BLAKE2B_BLOCK_SIZE {
 		ctx.t[0] += BLAKE2B_BLOCK_SIZE
 		if ctx.t[0] < BLAKE2B_BLOCK_SIZE {
-			ctx.t[1]+=1
-		} 
+			ctx.t[1] += 1
+		}
 		v0, v1, v2, v3, v4, v5, v6, v7 := h0, h1, h2, h3, h4, h5, h6, h7
 		v8 := BLAKE2B_IV[0]
 		v9 := BLAKE2B_IV[1]
@@ -1429,13 +1476,13 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v13 := BLAKE2B_IV[5] ~ ctx.t[1]
 		v14 := BLAKE2B_IV[6] ~ ctx.f[0]
 		v15 := BLAKE2B_IV[7] ~ ctx.f[1]
+
 		m: [16]u64 = ---
-		j := 0
-		for i := 0; i < 16; i+=1 {
-			m[i] = u64(p[j]) 		   | u64(p[j + 1]) << 8  | u64(p[j + 2]) << 16 | u64(p[j + 3]) << 24 |
-				   u64(p[j + 4]) << 32 | u64(p[j + 5]) << 40 | u64(p[j + 6]) << 48 | u64(p[j + 7]) << 56
-			j += 8
+		for i := 0; i < 16; i += 1 {
+			m[i] = endian.unchecked_get_u64le(p[i * 8:])
 		}
+
+		// Round 1
 		v0 += m[0]
 		v0 += v4
 		v12 ~= v0
@@ -1548,6 +1595,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 2
 		v0 += m[14]
 		v0 += v4
 		v12 ~= v0
@@ -1660,6 +1709,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 3
 		v0 += m[11]
 		v0 += v4
 		v12 ~= v0
@@ -1772,6 +1823,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 4
 		v0 += m[7]
 		v0 += v4
 		v12 ~= v0
@@ -1884,6 +1937,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 5
 		v0 += m[9]
 		v0 += v4
 		v12 ~= v0
@@ -1996,6 +2051,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 6
 		v0 += m[2]
 		v0 += v4
 		v12 ~= v0
@@ -2108,6 +2165,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 7
 		v0 += m[12]
 		v0 += v4
 		v12 ~= v0
@@ -2220,6 +2279,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 8
 		v0 += m[13]
 		v0 += v4
 		v12 ~= v0
@@ -2332,6 +2393,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 9
 		v0 += m[6]
 		v0 += v4
 		v12 ~= v0
@@ -2444,6 +2507,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 10
 		v0 += m[10]
 		v0 += v4
 		v12 ~= v0
@@ -2556,6 +2621,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 11
 		v0 += m[0]
 		v0 += v4
 		v12 ~= v0
@@ -2668,6 +2735,8 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
+		// Round 12
 		v0 += m[14]
 		v0 += v4
 		v12 ~= v0
@@ -2780,6 +2849,7 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		v10 += v15
 		v5 ~= v10
 		v5 = v5 << (64 - 63) | v5 >> 63
+
 		h0 ~= v0 ~ v8
 		h1 ~= v1 ~ v9
 		h2 ~= v2 ~ v10
@@ -2788,7 +2858,9 @@ blake2b_blocks :: #force_inline proc "contextless" (ctx: ^Blake2b_Context, p: []
 		h5 ~= v5 ~ v13
 		h6 ~= v6 ~ v14
 		h7 ~= v7 ~ v15
+
 		p = p[BLAKE2B_BLOCK_SIZE:]
 	}
-	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] = h0, h1, h2, h3, h4, h5, h6, h7
-}
+	ctx.h[0], ctx.h[1], ctx.h[2], ctx.h[3], ctx.h[4], ctx.h[5], ctx.h[6], ctx.h[7] =
+		h0, h1, h2, h3, h4, h5, h6, h7
+}
+ 20 - 33
core/crypto/_fiat/field_poly1305/field.odin

@@ -1,6 +1,6 @@
 package field_poly1305
 package field_poly1305
 
-import "core:crypto/util"
+import "core:encoding/endian"
 import "core:mem"
 
 fe_relax_cast :: #force_inline proc "contextless" (arg1: ^Tight_Field_Element) -> ^Loose_Field_Element {
@@ -11,7 +11,7 @@ fe_tighten_cast :: #force_inline proc "contextless" (arg1: ^Loose_Field_Element)
 	return transmute(^Tight_Field_Element)(arg1)
 }
 
-fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte, sanitize: bool = true) {
+fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, arg2: byte) {
 	// fiat-crypto's deserialization routine effectively processes a
 	// single byte at a time, and wants 256-bits of input for a value
 	// that will be 128-bits or 129-bits.
@@ -22,42 +22,29 @@ fe_from_bytes :: #force_inline proc (out1: ^Tight_Field_Element, arg1: []byte, a
 
 	assert(len(arg1) == 16)
 
-	when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 {
-		// While it may be unwise to do deserialization here on our
-		// own when fiat-crypto provides equivalent functionality,
-		// doing it this way provides a little under 3x performance
-		// improvement when optimization is enabled.
-		src_p := transmute(^[2]u64)(&arg1[0])
-		lo := src_p[0]
-		hi := src_p[1]
+	// While it may be unwise to do deserialization here on our
+	// own when fiat-crypto provides equivalent functionality,
+	// doing it this way provides a little under 3x performance
+	// improvement when optimization is enabled.
+	lo := endian.unchecked_get_u64le(arg1[0:])
+	hi := endian.unchecked_get_u64le(arg1[8:])
 
-		// This is inspired by poly1305-donna, though adjustments were
-		// made since a Tight_Field_Element's limbs are 44-bits, 43-bits,
-		// and 43-bits wide.
-		//
-		// Note: This could be transplated into fe_from_u64s, but that
-		// code is called once per MAC, and is non-criticial path.
-		hibit := u64(arg2) << 41 // arg2 << 128
-		out1[0] = lo & 0xfffffffffff
-		out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff
-		out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit
-	} else {
-		tmp: [32]byte
-		copy_slice(tmp[0:16], arg1[:])
-		tmp[16] = arg2
-
-		_fe_from_bytes(out1, &tmp)
-		if sanitize {
-			// This is used to deserialize `s` which is confidential.
-			mem.zero_explicit(&tmp, size_of(tmp))
-		}
-	}
+	// This is inspired by poly1305-donna, though adjustments were
+	// made since a Tight_Field_Element's limbs are 44-bits, 43-bits,
+	// and 43-bits wide.
+	//
+	// Note: This could be transplanted into fe_from_u64s, but that
+	// code is called once per MAC, and is not on the critical path.
+	hibit := u64(arg2) << 41 // arg2 << 128
+	out1[0] = lo & 0xfffffffffff
+	out1[1] = ((lo >> 44) | (hi << 20)) & 0x7ffffffffff
+	out1[2] = ((hi >> 23) & 0x7ffffffffff) | hibit
 }
 
 fe_from_u64s :: proc "contextless" (out1: ^Tight_Field_Element, lo, hi: u64) {
 	tmp: [32]byte
-	util.PUT_U64_LE(tmp[0:8], lo)
-	util.PUT_U64_LE(tmp[8:16], hi)
+	endian.unchecked_put_u64le(tmp[0:], lo)
+	endian.unchecked_put_u64le(tmp[8:], hi)
 
 	_fe_from_bytes(out1, &tmp)
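
The comment above packs a 129-bit value into 44/43/43-bit limbs; a small self-contained check (illustrative only, not library code) that recombines the limbs and confirms the shift constants 44 and 87 = 44 + 43:

```odin
package limb_split_check

import "core:fmt"

main :: proc() {
	lo: u64 = 0x0123456789abcdef
	hi: u64 = 0xfedcba9876543210

	// Same split as fe_from_bytes, with arg2 = 1 (the 2^128 "hibit").
	hibit := u64(1) << 41
	l0 := lo & 0xfffffffffff
	l1 := ((lo >> 44) | (hi << 20)) & 0x7ffffffffff
	l2 := ((hi >> 23) & 0x7ffffffffff) | hibit

	// Recombine: limb 0 is bits 0..43, limb 1 bits 44..86, limb 2 bits 87..128.
	got := u128(l0) | u128(l1) << 44 | u128(l2) << 87
	want := u128(hi) << 64 | u128(lo) | u128(1) << 128
	fmt.println(got == want) // true
}
```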

+ 152 - 138
core/crypto/_sha3/sha3.odin

@@ -11,159 +11,173 @@ package _sha3
     To use the original Keccak padding, set the is_keccak bool to true, otherwise it will use SHA3 padding.
 */
 
-import "../util"
+import "core:math/bits"
 
 ROUNDS :: 24
 
 Sha3_Context :: struct {
-    st: struct #raw_union {
-        b: [200]u8,
-        q: [25]u64,
-    },
-    pt:        int,
-    rsiz:      int,
-    mdlen:     int,
-    is_keccak: bool,
+	st:        struct #raw_union {
+		b: [200]u8,
+		q: [25]u64,
+	},
+	pt:        int,
+	rsiz:      int,
+	mdlen:     int,
+	is_keccak: bool,
+
+	is_initialized: bool,
+	is_finalized:   bool, // For SHAKE (unlimited squeeze is allowed)
 }
 
 keccakf :: proc "contextless" (st: ^[25]u64) {
-    keccakf_rndc := [?]u64 {
-        0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
-        0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
-        0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
-        0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
-        0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
-        0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
-        0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
-        0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
-    }
-
-    keccakf_rotc := [?]i32 {
-        1,  3,  6,  10, 15, 21, 28, 36, 45, 55, 2,  14,
-        27, 41, 56, 8,  25, 43, 62, 18, 39, 61, 20, 44,
-    }
-
-    keccakf_piln := [?]i32 {
-        10, 7,  11, 17, 18, 3, 5,  16, 8,  21, 24, 4,
-        15, 23, 19, 13, 12, 2, 20, 14, 22, 9,  6,  1,
-    }
-
-    i, j, r: i32 = ---, ---, ---
-    t: u64       = ---
-    bc: [5]u64   = ---
-
-    when ODIN_ENDIAN != .Little {
-        v: uintptr = ---
-        for i = 0; i < 25; i += 1 {
-            v := uintptr(&st[i])
-            st[i] = u64((^u8)(v + 0)^ << 0)  | u64((^u8)(v + 1)^ << 8)  |
-                    u64((^u8)(v + 2)^ << 16) | u64((^u8)(v + 3)^ << 24) |
-                    u64((^u8)(v + 4)^ << 32) | u64((^u8)(v + 5)^ << 40) |
-                    u64((^u8)(v + 6)^ << 48) | u64((^u8)(v + 7)^ << 56)
-        }
-    }
-
-    for r = 0; r < ROUNDS; r += 1 {
-        // theta
-        for i = 0; i < 5; i += 1 {
-            bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20]
-        }
-
-        for i = 0; i < 5; i += 1 {
-            t = bc[(i + 4) % 5] ~ util.ROTL64(bc[(i + 1) % 5], 1)
-            for j = 0; j < 25; j += 5 {
-                st[j + i] ~= t
-            }
-        }
-
-        // rho pi
-        t = st[1]
-        for i = 0; i < 24; i += 1 {
-            j = keccakf_piln[i]
-            bc[0] = st[j]
-            st[j] = util.ROTL64(t, u64(keccakf_rotc[i]))
-            t = bc[0]
-        }
-
-        // chi
-        for j = 0; j < 25; j += 5 {
-            for i = 0; i < 5; i += 1 {
-                bc[i] = st[j + i]
-            }
-            for i = 0; i < 5; i += 1 {
-                st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5]
-            }
-        }
-
-        st[0] ~= keccakf_rndc[r]
-    }
-
-    when ODIN_ENDIAN != .Little {
-        for i = 0; i < 25; i += 1 {
-            v = uintptr(&st[i])
-            t = st[i]
-            (^u8)(v + 0)^ = (t >> 0)  & 0xff
-            (^u8)(v + 1)^ = (t >> 8)  & 0xff
-            (^u8)(v + 2)^ = (t >> 16) & 0xff
-            (^u8)(v + 3)^ = (t >> 24) & 0xff
-            (^u8)(v + 4)^ = (t >> 32) & 0xff
-            (^u8)(v + 5)^ = (t >> 40) & 0xff
-            (^u8)(v + 6)^ = (t >> 48) & 0xff
-            (^u8)(v + 7)^ = (t >> 56) & 0xff
-        }
-    }
+	keccakf_rndc := [?]u64 {
+		0x0000000000000001, 0x0000000000008082, 0x800000000000808a,
+		0x8000000080008000, 0x000000000000808b, 0x0000000080000001,
+		0x8000000080008081, 0x8000000000008009, 0x000000000000008a,
+		0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
+		0x000000008000808b, 0x800000000000008b, 0x8000000000008089,
+		0x8000000000008003, 0x8000000000008002, 0x8000000000000080,
+		0x000000000000800a, 0x800000008000000a, 0x8000000080008081,
+		0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
+	}
+
+	keccakf_rotc := [?]int {
+		1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
+		27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
+	}
+
+	keccakf_piln := [?]i32 {
+		10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
+		15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
+	}
+
+	i, j, r: i32 = ---, ---, ---
+	t: u64 = ---
+	bc: [5]u64 = ---
+
+	when ODIN_ENDIAN != .Little {
+		for i = 0; i < 25; i += 1 {
+			st[i] = bits.byte_swap(st[i])
+		}
+	}
+
+	for r = 0; r < ROUNDS; r += 1 {
+		// theta
+		for i = 0; i < 5; i += 1 {
+			bc[i] = st[i] ~ st[i + 5] ~ st[i + 10] ~ st[i + 15] ~ st[i + 20]
+		}
+
+		for i = 0; i < 5; i += 1 {
+			t = bc[(i + 4) % 5] ~ bits.rotate_left64(bc[(i + 1) % 5], 1)
+			for j = 0; j < 25; j += 5 {
+				st[j + i] ~= t
+			}
+		}
+
+		// rho pi
+		t = st[1]
+		for i = 0; i < 24; i += 1 {
+			j = keccakf_piln[i]
+			bc[0] = st[j]
+			st[j] = bits.rotate_left64(t, keccakf_rotc[i])
+			t = bc[0]
+		}
+
+		// chi
+		for j = 0; j < 25; j += 5 {
+			for i = 0; i < 5; i += 1 {
+				bc[i] = st[j + i]
+			}
+			for i = 0; i < 5; i += 1 {
+				st[j + i] ~= ~bc[(i + 1) % 5] & bc[(i + 2) % 5]
+			}
+		}
+
+		st[0] ~= keccakf_rndc[r]
+	}
+
+	when ODIN_ENDIAN != .Little {
+		for i = 0; i < 25; i += 1 {
+			st[i] = bits.byte_swap(st[i])
+		}
+	}
 }
 
-init :: proc "contextless" (c: ^Sha3_Context) {
-    for i := 0; i < 25; i += 1 {
-        c.st.q[i] = 0
-    }
-    c.rsiz = 200 - 2 * c.mdlen
+init :: proc(c: ^Sha3_Context) {
+	for i := 0; i < 25; i += 1 {
+		c.st.q[i] = 0
+	}
+	c.rsiz = 200 - 2 * c.mdlen
+	c.pt = 0
+
+	c.is_initialized = true
+	c.is_finalized = false
 }
 
-update :: proc "contextless" (c: ^Sha3_Context, data: []byte) {
-    j := c.pt
-    for i := 0; i < len(data); i += 1 {
-        c.st.b[j] ~= data[i]
-        j += 1
-        if j >= c.rsiz {
-            keccakf(&c.st.q)
-            j = 0
-        }
-    }
-    c.pt = j
+update :: proc(c: ^Sha3_Context, data: []byte) {
+	assert(c.is_initialized)
+	assert(!c.is_finalized)
+
+	j := c.pt
+	for i := 0; i < len(data); i += 1 {
+		c.st.b[j] ~= data[i]
+		j += 1
+		if j >= c.rsiz {
+			keccakf(&c.st.q)
+			j = 0
+		}
+	}
+	c.pt = j
 }
 
-final :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
-    if c.is_keccak {
-        c.st.b[c.pt] ~= 0x01
-    } else {
-        c.st.b[c.pt] ~= 0x06
-    }
-    
-    c.st.b[c.rsiz - 1] ~= 0x80
-    keccakf(&c.st.q)
-    for i := 0; i < c.mdlen; i += 1 {
-        hash[i] = c.st.b[i]
-    }
+final :: proc(c: ^Sha3_Context, hash: []byte) {
+	assert(c.is_initialized)
+
+	if len(hash) < c.mdlen {
+		if c.is_keccak {
+			panic("crypto/keccak: invalid destination digest size")
+		}
+		panic("crypto/sha3: invalid destination digest size")
+	}
+	if c.is_keccak {
+		c.st.b[c.pt] ~= 0x01
+	} else {
+		c.st.b[c.pt] ~= 0x06
+	}
+
+	c.st.b[c.rsiz - 1] ~= 0x80
+	keccakf(&c.st.q)
+	for i := 0; i < c.mdlen; i += 1 {
+		hash[i] = c.st.b[i]
+	}
+
+	c.is_initialized = false // No more absorb, no more squeeze.
 }
 
-shake_xof :: proc "contextless" (c: ^Sha3_Context) {
-    c.st.b[c.pt]       ~= 0x1F
-    c.st.b[c.rsiz - 1] ~= 0x80
-    keccakf(&c.st.q)
-    c.pt = 0
+shake_xof :: proc(c: ^Sha3_Context) {
+	assert(c.is_initialized)
+	assert(!c.is_finalized)
+
+	c.st.b[c.pt] ~= 0x1F
+	c.st.b[c.rsiz - 1] ~= 0x80
+	keccakf(&c.st.q)
+	c.pt = 0
+
+	c.is_finalized = true // No more absorb, unlimited squeeze.
 }
 
-shake_out :: proc "contextless" (c: ^Sha3_Context, hash: []byte) {
-    j := c.pt
-    for i := 0; i < len(hash); i += 1 {
-        if j >= c.rsiz {
-            keccakf(&c.st.q)
-            j = 0
-        }
-        hash[i] = c.st.b[j]
-        j += 1
-    }
-    c.pt = j
+shake_out :: proc(c: ^Sha3_Context, hash: []byte) {
+	assert(c.is_initialized)
+	assert(c.is_finalized)
+
+	j := c.pt
+	for i := 0; i < len(hash); i += 1 {
+		if j >= c.rsiz {
+			keccakf(&c.st.q)
+			j = 0
+		}
+		hash[i] = c.st.b[j]
+		j += 1
+	}
+	c.pt = j
 }
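
A minimal sketch of the absorb/squeeze flow the new `is_initialized`/`is_finalized` flags enforce; importing the internal `_sha3` package directly and `mdlen = 32` (SHAKE256) are assumptions for illustration, the public `core:crypto/shake` package wraps this for real use:

```odin
package shake_flow_example

import "core:crypto/_sha3"

main :: proc() {
	input := "foo"

	ctx: _sha3.Sha3_Context
	ctx.mdlen = 32 // init derives the rate: rsiz = 200 - 2 * mdlen

	_sha3.init(&ctx)
	_sha3.update(&ctx, transmute([]byte)input)
	_sha3.shake_xof(&ctx) // absorbing is over; update would now assert

	// Unlimited squeeze: shake_out may be called any number of times.
	out: [64]byte
	_sha3.shake_out(&ctx, out[:32])
	_sha3.shake_out(&ctx, out[32:])
}
```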

+ 0 - 410
core/crypto/_tiger/tiger.odin

@@ -1,410 +0,0 @@
-package _tiger
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the Tiger hashing algorithm, as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
-*/
-
-import "../util"
-
-T1 := [?]u64 {
-	0x02aab17cf7e90c5e, 0xac424b03e243a8ec, 0x72cd5be30dd5fcd3, 0x6d019b93f6f97f3a,
-	0xcd9978ffd21f9193, 0x7573a1c9708029e2, 0xb164326b922a83c3, 0x46883eee04915870,
-	0xeaace3057103ece6, 0xc54169b808a3535c, 0x4ce754918ddec47c, 0x0aa2f4dfdc0df40c,
-	0x10b76f18a74dbefa, 0xc6ccb6235ad1ab6a, 0x13726121572fe2ff, 0x1a488c6f199d921e,
-	0x4bc9f9f4da0007ca, 0x26f5e6f6e85241c7, 0x859079dbea5947b6, 0x4f1885c5c99e8c92,
-	0xd78e761ea96f864b, 0x8e36428c52b5c17d, 0x69cf6827373063c1, 0xb607c93d9bb4c56e,
-	0x7d820e760e76b5ea, 0x645c9cc6f07fdc42, 0xbf38a078243342e0, 0x5f6b343c9d2e7d04,
-	0xf2c28aeb600b0ec6, 0x6c0ed85f7254bcac, 0x71592281a4db4fe5, 0x1967fa69ce0fed9f,
-	0xfd5293f8b96545db, 0xc879e9d7f2a7600b, 0x860248920193194e, 0xa4f9533b2d9cc0b3,
-	0x9053836c15957613, 0xdb6dcf8afc357bf1, 0x18beea7a7a370f57, 0x037117ca50b99066,
-	0x6ab30a9774424a35, 0xf4e92f02e325249b, 0x7739db07061ccae1, 0xd8f3b49ceca42a05,
-	0xbd56be3f51382f73, 0x45faed5843b0bb28, 0x1c813d5c11bf1f83, 0x8af0e4b6d75fa169,
-	0x33ee18a487ad9999, 0x3c26e8eab1c94410, 0xb510102bc0a822f9, 0x141eef310ce6123b,
-	0xfc65b90059ddb154, 0xe0158640c5e0e607, 0x884e079826c3a3cf, 0x930d0d9523c535fd,
-	0x35638d754e9a2b00, 0x4085fccf40469dd5, 0xc4b17ad28be23a4c, 0xcab2f0fc6a3e6a2e,
-	0x2860971a6b943fcd, 0x3dde6ee212e30446, 0x6222f32ae01765ae, 0x5d550bb5478308fe,
-	0xa9efa98da0eda22a, 0xc351a71686c40da7, 0x1105586d9c867c84, 0xdcffee85fda22853,
-	0xccfbd0262c5eef76, 0xbaf294cb8990d201, 0xe69464f52afad975, 0x94b013afdf133e14,
-	0x06a7d1a32823c958, 0x6f95fe5130f61119, 0xd92ab34e462c06c0, 0xed7bde33887c71d2,
-	0x79746d6e6518393e, 0x5ba419385d713329, 0x7c1ba6b948a97564, 0x31987c197bfdac67,
-	0xde6c23c44b053d02, 0x581c49fed002d64d, 0xdd474d6338261571, 0xaa4546c3e473d062,
-	0x928fce349455f860, 0x48161bbacaab94d9, 0x63912430770e6f68, 0x6ec8a5e602c6641c,
-	0x87282515337ddd2b, 0x2cda6b42034b701b, 0xb03d37c181cb096d, 0xe108438266c71c6f,
-	0x2b3180c7eb51b255, 0xdf92b82f96c08bbc, 0x5c68c8c0a632f3ba, 0x5504cc861c3d0556,
-	0xabbfa4e55fb26b8f, 0x41848b0ab3baceb4, 0xb334a273aa445d32, 0xbca696f0a85ad881,
-	0x24f6ec65b528d56c, 0x0ce1512e90f4524a, 0x4e9dd79d5506d35a, 0x258905fac6ce9779,
-	0x2019295b3e109b33, 0xf8a9478b73a054cc, 0x2924f2f934417eb0, 0x3993357d536d1bc4,
-	0x38a81ac21db6ff8b, 0x47c4fbf17d6016bf, 0x1e0faadd7667e3f5, 0x7abcff62938beb96,
-	0xa78dad948fc179c9, 0x8f1f98b72911e50d, 0x61e48eae27121a91, 0x4d62f7ad31859808,
-	0xeceba345ef5ceaeb, 0xf5ceb25ebc9684ce, 0xf633e20cb7f76221, 0xa32cdf06ab8293e4,
-	0x985a202ca5ee2ca4, 0xcf0b8447cc8a8fb1, 0x9f765244979859a3, 0xa8d516b1a1240017,
-	0x0bd7ba3ebb5dc726, 0xe54bca55b86adb39, 0x1d7a3afd6c478063, 0x519ec608e7669edd,
-	0x0e5715a2d149aa23, 0x177d4571848ff194, 0xeeb55f3241014c22, 0x0f5e5ca13a6e2ec2,
-	0x8029927b75f5c361, 0xad139fabc3d6e436, 0x0d5df1a94ccf402f, 0x3e8bd948bea5dfc8,
-	0xa5a0d357bd3ff77e, 0xa2d12e251f74f645, 0x66fd9e525e81a082, 0x2e0c90ce7f687a49,
-	0xc2e8bcbeba973bc5, 0x000001bce509745f, 0x423777bbe6dab3d6, 0xd1661c7eaef06eb5,
-	0xa1781f354daacfd8, 0x2d11284a2b16affc, 0xf1fc4f67fa891d1f, 0x73ecc25dcb920ada,
-	0xae610c22c2a12651, 0x96e0a810d356b78a, 0x5a9a381f2fe7870f, 0xd5ad62ede94e5530,
-	0xd225e5e8368d1427, 0x65977b70c7af4631, 0x99f889b2de39d74f, 0x233f30bf54e1d143,
-	0x9a9675d3d9a63c97, 0x5470554ff334f9a8, 0x166acb744a4f5688, 0x70c74caab2e4aead,
-	0xf0d091646f294d12, 0x57b82a89684031d1, 0xefd95a5a61be0b6b, 0x2fbd12e969f2f29a,
-	0x9bd37013feff9fe8, 0x3f9b0404d6085a06, 0x4940c1f3166cfe15, 0x09542c4dcdf3defb,
-	0xb4c5218385cd5ce3, 0xc935b7dc4462a641, 0x3417f8a68ed3b63f, 0xb80959295b215b40,
-	0xf99cdaef3b8c8572, 0x018c0614f8fcb95d, 0x1b14accd1a3acdf3, 0x84d471f200bb732d,
-	0xc1a3110e95e8da16, 0x430a7220bf1a82b8, 0xb77e090d39df210e, 0x5ef4bd9f3cd05e9d,
-	0x9d4ff6da7e57a444, 0xda1d60e183d4a5f8, 0xb287c38417998e47, 0xfe3edc121bb31886,
-	0xc7fe3ccc980ccbef, 0xe46fb590189bfd03, 0x3732fd469a4c57dc, 0x7ef700a07cf1ad65,
-	0x59c64468a31d8859, 0x762fb0b4d45b61f6, 0x155baed099047718, 0x68755e4c3d50baa6,
-	0xe9214e7f22d8b4df, 0x2addbf532eac95f4, 0x32ae3909b4bd0109, 0x834df537b08e3450,
-	0xfa209da84220728d, 0x9e691d9b9efe23f7, 0x0446d288c4ae8d7f, 0x7b4cc524e169785b,
-	0x21d87f0135ca1385, 0xcebb400f137b8aa5, 0x272e2b66580796be, 0x3612264125c2b0de,
-	0x057702bdad1efbb2, 0xd4babb8eacf84be9, 0x91583139641bc67b, 0x8bdc2de08036e024,
-	0x603c8156f49f68ed, 0xf7d236f7dbef5111, 0x9727c4598ad21e80, 0xa08a0896670a5fd7,
-	0xcb4a8f4309eba9cb, 0x81af564b0f7036a1, 0xc0b99aa778199abd, 0x959f1ec83fc8e952,
-	0x8c505077794a81b9, 0x3acaaf8f056338f0, 0x07b43f50627a6778, 0x4a44ab49f5eccc77,
-	0x3bc3d6e4b679ee98, 0x9cc0d4d1cf14108c, 0x4406c00b206bc8a0, 0x82a18854c8d72d89,
-	0x67e366b35c3c432c, 0xb923dd61102b37f2, 0x56ab2779d884271d, 0xbe83e1b0ff1525af,
-	0xfb7c65d4217e49a9, 0x6bdbe0e76d48e7d4, 0x08df828745d9179e, 0x22ea6a9add53bd34,
-	0xe36e141c5622200a, 0x7f805d1b8cb750ee, 0xafe5c7a59f58e837, 0xe27f996a4fb1c23c,
-	0xd3867dfb0775f0d0, 0xd0e673de6e88891a, 0x123aeb9eafb86c25, 0x30f1d5d5c145b895,
-	0xbb434a2dee7269e7, 0x78cb67ecf931fa38, 0xf33b0372323bbf9c, 0x52d66336fb279c74,
-	0x505f33ac0afb4eaa, 0xe8a5cd99a2cce187, 0x534974801e2d30bb, 0x8d2d5711d5876d90,
-	0x1f1a412891bc038e, 0xd6e2e71d82e56648, 0x74036c3a497732b7, 0x89b67ed96361f5ab,
-	0xffed95d8f1ea02a2, 0xe72b3bd61464d43d, 0xa6300f170bdc4820, 0xebc18760ed78a77a,
-}
-
-T2 := [?]u64 {
-	0xe6a6be5a05a12138, 0xb5a122a5b4f87c98, 0x563c6089140b6990, 0x4c46cb2e391f5dd5,
-	0xd932addbc9b79434, 0x08ea70e42015aff5, 0xd765a6673e478cf1, 0xc4fb757eab278d99,
-	0xdf11c6862d6e0692, 0xddeb84f10d7f3b16, 0x6f2ef604a665ea04, 0x4a8e0f0ff0e0dfb3,
-	0xa5edeef83dbcba51, 0xfc4f0a2a0ea4371e, 0xe83e1da85cb38429, 0xdc8ff882ba1b1ce2,
-	0xcd45505e8353e80d, 0x18d19a00d4db0717, 0x34a0cfeda5f38101, 0x0be77e518887caf2,
-	0x1e341438b3c45136, 0xe05797f49089ccf9, 0xffd23f9df2591d14, 0x543dda228595c5cd,
-	0x661f81fd99052a33, 0x8736e641db0f7b76, 0x15227725418e5307, 0xe25f7f46162eb2fa,
-	0x48a8b2126c13d9fe, 0xafdc541792e76eea, 0x03d912bfc6d1898f, 0x31b1aafa1b83f51b,
-	0xf1ac2796e42ab7d9, 0x40a3a7d7fcd2ebac, 0x1056136d0afbbcc5, 0x7889e1dd9a6d0c85,
-	0xd33525782a7974aa, 0xa7e25d09078ac09b, 0xbd4138b3eac6edd0, 0x920abfbe71eb9e70,
-	0xa2a5d0f54fc2625c, 0xc054e36b0b1290a3, 0xf6dd59ff62fe932b, 0x3537354511a8ac7d,
-	0xca845e9172fadcd4, 0x84f82b60329d20dc, 0x79c62ce1cd672f18, 0x8b09a2add124642c,
-	0xd0c1e96a19d9e726, 0x5a786a9b4ba9500c, 0x0e020336634c43f3, 0xc17b474aeb66d822,
-	0x6a731ae3ec9baac2, 0x8226667ae0840258, 0x67d4567691caeca5, 0x1d94155c4875adb5,
-	0x6d00fd985b813fdf, 0x51286efcb774cd06, 0x5e8834471fa744af, 0xf72ca0aee761ae2e,
-	0xbe40e4cdaee8e09a, 0xe9970bbb5118f665, 0x726e4beb33df1964, 0x703b000729199762,
-	0x4631d816f5ef30a7, 0xb880b5b51504a6be, 0x641793c37ed84b6c, 0x7b21ed77f6e97d96,
-	0x776306312ef96b73, 0xae528948e86ff3f4, 0x53dbd7f286a3f8f8, 0x16cadce74cfc1063,
-	0x005c19bdfa52c6dd, 0x68868f5d64d46ad3, 0x3a9d512ccf1e186a, 0x367e62c2385660ae,
-	0xe359e7ea77dcb1d7, 0x526c0773749abe6e, 0x735ae5f9d09f734b, 0x493fc7cc8a558ba8,
-	0xb0b9c1533041ab45, 0x321958ba470a59bd, 0x852db00b5f46c393, 0x91209b2bd336b0e5,
-	0x6e604f7d659ef19f, 0xb99a8ae2782ccb24, 0xccf52ab6c814c4c7, 0x4727d9afbe11727b,
-	0x7e950d0c0121b34d, 0x756f435670ad471f, 0xf5add442615a6849, 0x4e87e09980b9957a,
-	0x2acfa1df50aee355, 0xd898263afd2fd556, 0xc8f4924dd80c8fd6, 0xcf99ca3d754a173a,
-	0xfe477bacaf91bf3c, 0xed5371f6d690c12d, 0x831a5c285e687094, 0xc5d3c90a3708a0a4,
-	0x0f7f903717d06580, 0x19f9bb13b8fdf27f, 0xb1bd6f1b4d502843, 0x1c761ba38fff4012,
-	0x0d1530c4e2e21f3b, 0x8943ce69a7372c8a, 0xe5184e11feb5ce66, 0x618bdb80bd736621,
-	0x7d29bad68b574d0b, 0x81bb613e25e6fe5b, 0x071c9c10bc07913f, 0xc7beeb7909ac2d97,
-	0xc3e58d353bc5d757, 0xeb017892f38f61e8, 0xd4effb9c9b1cc21a, 0x99727d26f494f7ab,
-	0xa3e063a2956b3e03, 0x9d4a8b9a4aa09c30, 0x3f6ab7d500090fb4, 0x9cc0f2a057268ac0,
-	0x3dee9d2dedbf42d1, 0x330f49c87960a972, 0xc6b2720287421b41, 0x0ac59ec07c00369c,
-	0xef4eac49cb353425, 0xf450244eef0129d8, 0x8acc46e5caf4deb6, 0x2ffeab63989263f7,
-	0x8f7cb9fe5d7a4578, 0x5bd8f7644e634635, 0x427a7315bf2dc900, 0x17d0c4aa2125261c,
-	0x3992486c93518e50, 0xb4cbfee0a2d7d4c3, 0x7c75d6202c5ddd8d, 0xdbc295d8e35b6c61,
-	0x60b369d302032b19, 0xce42685fdce44132, 0x06f3ddb9ddf65610, 0x8ea4d21db5e148f0,
-	0x20b0fce62fcd496f, 0x2c1b912358b0ee31, 0xb28317b818f5a308, 0xa89c1e189ca6d2cf,
-	0x0c6b18576aaadbc8, 0xb65deaa91299fae3, 0xfb2b794b7f1027e7, 0x04e4317f443b5beb,
-	0x4b852d325939d0a6, 0xd5ae6beefb207ffc, 0x309682b281c7d374, 0xbae309a194c3b475,
-	0x8cc3f97b13b49f05, 0x98a9422ff8293967, 0x244b16b01076ff7c, 0xf8bf571c663d67ee,
-	0x1f0d6758eee30da1, 0xc9b611d97adeb9b7, 0xb7afd5887b6c57a2, 0x6290ae846b984fe1,
-	0x94df4cdeacc1a5fd, 0x058a5bd1c5483aff, 0x63166cc142ba3c37, 0x8db8526eb2f76f40,
-	0xe10880036f0d6d4e, 0x9e0523c9971d311d, 0x45ec2824cc7cd691, 0x575b8359e62382c9,
-	0xfa9e400dc4889995, 0xd1823ecb45721568, 0xdafd983b8206082f, 0xaa7d29082386a8cb,
-	0x269fcd4403b87588, 0x1b91f5f728bdd1e0, 0xe4669f39040201f6, 0x7a1d7c218cf04ade,
-	0x65623c29d79ce5ce, 0x2368449096c00bb1, 0xab9bf1879da503ba, 0xbc23ecb1a458058e,
-	0x9a58df01bb401ecc, 0xa070e868a85f143d, 0x4ff188307df2239e, 0x14d565b41a641183,
-	0xee13337452701602, 0x950e3dcf3f285e09, 0x59930254b9c80953, 0x3bf299408930da6d,
-	0xa955943f53691387, 0xa15edecaa9cb8784, 0x29142127352be9a0, 0x76f0371fff4e7afb,
-	0x0239f450274f2228, 0xbb073af01d5e868b, 0xbfc80571c10e96c1, 0xd267088568222e23,
-	0x9671a3d48e80b5b0, 0x55b5d38ae193bb81, 0x693ae2d0a18b04b8, 0x5c48b4ecadd5335f,
-	0xfd743b194916a1ca, 0x2577018134be98c4, 0xe77987e83c54a4ad, 0x28e11014da33e1b9,
-	0x270cc59e226aa213, 0x71495f756d1a5f60, 0x9be853fb60afef77, 0xadc786a7f7443dbf,
-	0x0904456173b29a82, 0x58bc7a66c232bd5e, 0xf306558c673ac8b2, 0x41f639c6b6c9772a,
-	0x216defe99fda35da, 0x11640cc71c7be615, 0x93c43694565c5527, 0xea038e6246777839,
-	0xf9abf3ce5a3e2469, 0x741e768d0fd312d2, 0x0144b883ced652c6, 0xc20b5a5ba33f8552,
-	0x1ae69633c3435a9d, 0x97a28ca4088cfdec, 0x8824a43c1e96f420, 0x37612fa66eeea746,
-	0x6b4cb165f9cf0e5a, 0x43aa1c06a0abfb4a, 0x7f4dc26ff162796b, 0x6cbacc8e54ed9b0f,
-	0xa6b7ffefd2bb253e, 0x2e25bc95b0a29d4f, 0x86d6a58bdef1388c, 0xded74ac576b6f054,
-	0x8030bdbc2b45805d, 0x3c81af70e94d9289, 0x3eff6dda9e3100db, 0xb38dc39fdfcc8847,
-	0x123885528d17b87e, 0xf2da0ed240b1b642, 0x44cefadcd54bf9a9, 0x1312200e433c7ee6,
-	0x9ffcc84f3a78c748, 0xf0cd1f72248576bb, 0xec6974053638cfe4, 0x2ba7b67c0cec4e4c,
-	0xac2f4df3e5ce32ed, 0xcb33d14326ea4c11, 0xa4e9044cc77e58bc, 0x5f513293d934fcef,
-	0x5dc9645506e55444, 0x50de418f317de40a, 0x388cb31a69dde259, 0x2db4a83455820a86,
-	0x9010a91e84711ae9, 0x4df7f0b7b1498371, 0xd62a2eabc0977179, 0x22fac097aa8d5c0e,
-}
-
-T3 := [?]u64 {
-	0xf49fcc2ff1daf39b, 0x487fd5c66ff29281, 0xe8a30667fcdca83f, 0x2c9b4be3d2fcce63,
-	0xda3ff74b93fbbbc2, 0x2fa165d2fe70ba66, 0xa103e279970e93d4, 0xbecdec77b0e45e71,
-	0xcfb41e723985e497, 0xb70aaa025ef75017, 0xd42309f03840b8e0, 0x8efc1ad035898579,
-	0x96c6920be2b2abc5, 0x66af4163375a9172, 0x2174abdcca7127fb, 0xb33ccea64a72ff41,
-	0xf04a4933083066a5, 0x8d970acdd7289af5, 0x8f96e8e031c8c25e, 0xf3fec02276875d47,
-	0xec7bf310056190dd, 0xf5adb0aebb0f1491, 0x9b50f8850fd58892, 0x4975488358b74de8,
-	0xa3354ff691531c61, 0x0702bbe481d2c6ee, 0x89fb24057deded98, 0xac3075138596e902,
-	0x1d2d3580172772ed, 0xeb738fc28e6bc30d, 0x5854ef8f63044326, 0x9e5c52325add3bbe,
-	0x90aa53cf325c4623, 0xc1d24d51349dd067, 0x2051cfeea69ea624, 0x13220f0a862e7e4f,
-	0xce39399404e04864, 0xd9c42ca47086fcb7, 0x685ad2238a03e7cc, 0x066484b2ab2ff1db,
-	0xfe9d5d70efbf79ec, 0x5b13b9dd9c481854, 0x15f0d475ed1509ad, 0x0bebcd060ec79851,
-	0xd58c6791183ab7f8, 0xd1187c5052f3eee4, 0xc95d1192e54e82ff, 0x86eea14cb9ac6ca2,
-	0x3485beb153677d5d, 0xdd191d781f8c492a, 0xf60866baa784ebf9, 0x518f643ba2d08c74,
-	0x8852e956e1087c22, 0xa768cb8dc410ae8d, 0x38047726bfec8e1a, 0xa67738b4cd3b45aa,
-	0xad16691cec0dde19, 0xc6d4319380462e07, 0xc5a5876d0ba61938, 0x16b9fa1fa58fd840,
-	0x188ab1173ca74f18, 0xabda2f98c99c021f, 0x3e0580ab134ae816, 0x5f3b05b773645abb,
-	0x2501a2be5575f2f6, 0x1b2f74004e7e8ba9, 0x1cd7580371e8d953, 0x7f6ed89562764e30,
-	0xb15926ff596f003d, 0x9f65293da8c5d6b9, 0x6ecef04dd690f84c, 0x4782275fff33af88,
-	0xe41433083f820801, 0xfd0dfe409a1af9b5, 0x4325a3342cdb396b, 0x8ae77e62b301b252,
-	0xc36f9e9f6655615a, 0x85455a2d92d32c09, 0xf2c7dea949477485, 0x63cfb4c133a39eba,
-	0x83b040cc6ebc5462, 0x3b9454c8fdb326b0, 0x56f56a9e87ffd78c, 0x2dc2940d99f42bc6,
-	0x98f7df096b096e2d, 0x19a6e01e3ad852bf, 0x42a99ccbdbd4b40b, 0xa59998af45e9c559,
-	0x366295e807d93186, 0x6b48181bfaa1f773, 0x1fec57e2157a0a1d, 0x4667446af6201ad5,
-	0xe615ebcacfb0f075, 0xb8f31f4f68290778, 0x22713ed6ce22d11e, 0x3057c1a72ec3c93b,
-	0xcb46acc37c3f1f2f, 0xdbb893fd02aaf50e, 0x331fd92e600b9fcf, 0xa498f96148ea3ad6,
-	0xa8d8426e8b6a83ea, 0xa089b274b7735cdc, 0x87f6b3731e524a11, 0x118808e5cbc96749,
-	0x9906e4c7b19bd394, 0xafed7f7e9b24a20c, 0x6509eadeeb3644a7, 0x6c1ef1d3e8ef0ede,
-	0xb9c97d43e9798fb4, 0xa2f2d784740c28a3, 0x7b8496476197566f, 0x7a5be3e6b65f069d,
-	0xf96330ed78be6f10, 0xeee60de77a076a15, 0x2b4bee4aa08b9bd0, 0x6a56a63ec7b8894e,
-	0x02121359ba34fef4, 0x4cbf99f8283703fc, 0x398071350caf30c8, 0xd0a77a89f017687a,
-	0xf1c1a9eb9e423569, 0x8c7976282dee8199, 0x5d1737a5dd1f7abd, 0x4f53433c09a9fa80,
-	0xfa8b0c53df7ca1d9, 0x3fd9dcbc886ccb77, 0xc040917ca91b4720, 0x7dd00142f9d1dcdf,
-	0x8476fc1d4f387b58, 0x23f8e7c5f3316503, 0x032a2244e7e37339, 0x5c87a5d750f5a74b,
-	0x082b4cc43698992e, 0xdf917becb858f63c, 0x3270b8fc5bf86dda, 0x10ae72bb29b5dd76,
-	0x576ac94e7700362b, 0x1ad112dac61efb8f, 0x691bc30ec5faa427, 0xff246311cc327143,
-	0x3142368e30e53206, 0x71380e31e02ca396, 0x958d5c960aad76f1, 0xf8d6f430c16da536,
-	0xc8ffd13f1be7e1d2, 0x7578ae66004ddbe1, 0x05833f01067be646, 0xbb34b5ad3bfe586d,
-	0x095f34c9a12b97f0, 0x247ab64525d60ca8, 0xdcdbc6f3017477d1, 0x4a2e14d4decad24d,
-	0xbdb5e6d9be0a1eeb, 0x2a7e70f7794301ab, 0xdef42d8a270540fd, 0x01078ec0a34c22c1,
-	0xe5de511af4c16387, 0x7ebb3a52bd9a330a, 0x77697857aa7d6435, 0x004e831603ae4c32,
-	0xe7a21020ad78e312, 0x9d41a70c6ab420f2, 0x28e06c18ea1141e6, 0xd2b28cbd984f6b28,
-	0x26b75f6c446e9d83, 0xba47568c4d418d7f, 0xd80badbfe6183d8e, 0x0e206d7f5f166044,
-	0xe258a43911cbca3e, 0x723a1746b21dc0bc, 0xc7caa854f5d7cdd3, 0x7cac32883d261d9c,
-	0x7690c26423ba942c, 0x17e55524478042b8, 0xe0be477656a2389f, 0x4d289b5e67ab2da0,
-	0x44862b9c8fbbfd31, 0xb47cc8049d141365, 0x822c1b362b91c793, 0x4eb14655fb13dfd8,
-	0x1ecbba0714e2a97b, 0x6143459d5cde5f14, 0x53a8fbf1d5f0ac89, 0x97ea04d81c5e5b00,
-	0x622181a8d4fdb3f3, 0xe9bcd341572a1208, 0x1411258643cce58a, 0x9144c5fea4c6e0a4,
-	0x0d33d06565cf620f, 0x54a48d489f219ca1, 0xc43e5eac6d63c821, 0xa9728b3a72770daf,
-	0xd7934e7b20df87ef, 0xe35503b61a3e86e5, 0xcae321fbc819d504, 0x129a50b3ac60bfa6,
-	0xcd5e68ea7e9fb6c3, 0xb01c90199483b1c7, 0x3de93cd5c295376c, 0xaed52edf2ab9ad13,
-	0x2e60f512c0a07884, 0xbc3d86a3e36210c9, 0x35269d9b163951ce, 0x0c7d6e2ad0cdb5fa,
-	0x59e86297d87f5733, 0x298ef221898db0e7, 0x55000029d1a5aa7e, 0x8bc08ae1b5061b45,
-	0xc2c31c2b6c92703a, 0x94cc596baf25ef42, 0x0a1d73db22540456, 0x04b6a0f9d9c4179a,
-	0xeffdafa2ae3d3c60, 0xf7c8075bb49496c4, 0x9cc5c7141d1cd4e3, 0x78bd1638218e5534,
-	0xb2f11568f850246a, 0xedfabcfa9502bc29, 0x796ce5f2da23051b, 0xaae128b0dc93537c,
-	0x3a493da0ee4b29ae, 0xb5df6b2c416895d7, 0xfcabbd25122d7f37, 0x70810b58105dc4b1,
-	0xe10fdd37f7882a90, 0x524dcab5518a3f5c, 0x3c9e85878451255b, 0x4029828119bd34e2,
-	0x74a05b6f5d3ceccb, 0xb610021542e13eca, 0x0ff979d12f59e2ac, 0x6037da27e4f9cc50,
-	0x5e92975a0df1847d, 0xd66de190d3e623fe, 0x5032d6b87b568048, 0x9a36b7ce8235216e,
-	0x80272a7a24f64b4a, 0x93efed8b8c6916f7, 0x37ddbff44cce1555, 0x4b95db5d4b99bd25,
-	0x92d3fda169812fc0, 0xfb1a4a9a90660bb6, 0x730c196946a4b9b2, 0x81e289aa7f49da68,
-	0x64669a0f83b1a05f, 0x27b3ff7d9644f48b, 0xcc6b615c8db675b3, 0x674f20b9bcebbe95,
-	0x6f31238275655982, 0x5ae488713e45cf05, 0xbf619f9954c21157, 0xeabac46040a8eae9,
-	0x454c6fe9f2c0c1cd, 0x419cf6496412691c, 0xd3dc3bef265b0f70, 0x6d0e60f5c3578a9e,
-}
-
-T4 := [?]u64 {
-	0x5b0e608526323c55, 0x1a46c1a9fa1b59f5, 0xa9e245a17c4c8ffa, 0x65ca5159db2955d7,
-	0x05db0a76ce35afc2, 0x81eac77ea9113d45, 0x528ef88ab6ac0a0d, 0xa09ea253597be3ff,
-	0x430ddfb3ac48cd56, 0xc4b3a67af45ce46f, 0x4ececfd8fbe2d05e, 0x3ef56f10b39935f0,
-	0x0b22d6829cd619c6, 0x17fd460a74df2069, 0x6cf8cc8e8510ed40, 0xd6c824bf3a6ecaa7,
-	0x61243d581a817049, 0x048bacb6bbc163a2, 0xd9a38ac27d44cc32, 0x7fddff5baaf410ab,
-	0xad6d495aa804824b, 0xe1a6a74f2d8c9f94, 0xd4f7851235dee8e3, 0xfd4b7f886540d893,
-	0x247c20042aa4bfda, 0x096ea1c517d1327c, 0xd56966b4361a6685, 0x277da5c31221057d,
-	0x94d59893a43acff7, 0x64f0c51ccdc02281, 0x3d33bcc4ff6189db, 0xe005cb184ce66af1,
-	0xff5ccd1d1db99bea, 0xb0b854a7fe42980f, 0x7bd46a6a718d4b9f, 0xd10fa8cc22a5fd8c,
-	0xd31484952be4bd31, 0xc7fa975fcb243847, 0x4886ed1e5846c407, 0x28cddb791eb70b04,
-	0xc2b00be2f573417f, 0x5c9590452180f877, 0x7a6bddfff370eb00, 0xce509e38d6d9d6a4,
-	0xebeb0f00647fa702, 0x1dcc06cf76606f06, 0xe4d9f28ba286ff0a, 0xd85a305dc918c262,
-	0x475b1d8732225f54, 0x2d4fb51668ccb5fe, 0xa679b9d9d72bba20, 0x53841c0d912d43a5,
-	0x3b7eaa48bf12a4e8, 0x781e0e47f22f1ddf, 0xeff20ce60ab50973, 0x20d261d19dffb742,
-	0x16a12b03062a2e39, 0x1960eb2239650495, 0x251c16fed50eb8b8, 0x9ac0c330f826016e,
-	0xed152665953e7671, 0x02d63194a6369570, 0x5074f08394b1c987, 0x70ba598c90b25ce1,
-	0x794a15810b9742f6, 0x0d5925e9fcaf8c6c, 0x3067716cd868744e, 0x910ab077e8d7731b,
-	0x6a61bbdb5ac42f61, 0x93513efbf0851567, 0xf494724b9e83e9d5, 0xe887e1985c09648d,
-	0x34b1d3c675370cfd, 0xdc35e433bc0d255d, 0xd0aab84234131be0, 0x08042a50b48b7eaf,
-	0x9997c4ee44a3ab35, 0x829a7b49201799d0, 0x263b8307b7c54441, 0x752f95f4fd6a6ca6,
-	0x927217402c08c6e5, 0x2a8ab754a795d9ee, 0xa442f7552f72943d, 0x2c31334e19781208,
-	0x4fa98d7ceaee6291, 0x55c3862f665db309, 0xbd0610175d53b1f3, 0x46fe6cb840413f27,
-	0x3fe03792df0cfa59, 0xcfe700372eb85e8f, 0xa7be29e7adbce118, 0xe544ee5cde8431dd,
-	0x8a781b1b41f1873e, 0xa5c94c78a0d2f0e7, 0x39412e2877b60728, 0xa1265ef3afc9a62c,
-	0xbcc2770c6a2506c5, 0x3ab66dd5dce1ce12, 0xe65499d04a675b37, 0x7d8f523481bfd216,
-	0x0f6f64fcec15f389, 0x74efbe618b5b13c8, 0xacdc82b714273e1d, 0xdd40bfe003199d17,
-	0x37e99257e7e061f8, 0xfa52626904775aaa, 0x8bbbf63a463d56f9, 0xf0013f1543a26e64,
-	0xa8307e9f879ec898, 0xcc4c27a4150177cc, 0x1b432f2cca1d3348, 0xde1d1f8f9f6fa013,
-	0x606602a047a7ddd6, 0xd237ab64cc1cb2c7, 0x9b938e7225fcd1d3, 0xec4e03708e0ff476,
-	0xfeb2fbda3d03c12d, 0xae0bced2ee43889a, 0x22cb8923ebfb4f43, 0x69360d013cf7396d,
-	0x855e3602d2d4e022, 0x073805bad01f784c, 0x33e17a133852f546, 0xdf4874058ac7b638,
-	0xba92b29c678aa14a, 0x0ce89fc76cfaadcd, 0x5f9d4e0908339e34, 0xf1afe9291f5923b9,
-	0x6e3480f60f4a265f, 0xeebf3a2ab29b841c, 0xe21938a88f91b4ad, 0x57dfeff845c6d3c3,
-	0x2f006b0bf62caaf2, 0x62f479ef6f75ee78, 0x11a55ad41c8916a9, 0xf229d29084fed453,
-	0x42f1c27b16b000e6, 0x2b1f76749823c074, 0x4b76eca3c2745360, 0x8c98f463b91691bd,
-	0x14bcc93cf1ade66a, 0x8885213e6d458397, 0x8e177df0274d4711, 0xb49b73b5503f2951,
-	0x10168168c3f96b6b, 0x0e3d963b63cab0ae, 0x8dfc4b5655a1db14, 0xf789f1356e14de5c,
-	0x683e68af4e51dac1, 0xc9a84f9d8d4b0fd9, 0x3691e03f52a0f9d1, 0x5ed86e46e1878e80,
-	0x3c711a0e99d07150, 0x5a0865b20c4e9310, 0x56fbfc1fe4f0682e, 0xea8d5de3105edf9b,
-	0x71abfdb12379187a, 0x2eb99de1bee77b9c, 0x21ecc0ea33cf4523, 0x59a4d7521805c7a1,
-	0x3896f5eb56ae7c72, 0xaa638f3db18f75dc, 0x9f39358dabe9808e, 0xb7defa91c00b72ac,
-	0x6b5541fd62492d92, 0x6dc6dee8f92e4d5b, 0x353f57abc4beea7e, 0x735769d6da5690ce,
-	0x0a234aa642391484, 0xf6f9508028f80d9d, 0xb8e319a27ab3f215, 0x31ad9c1151341a4d,
-	0x773c22a57bef5805, 0x45c7561a07968633, 0xf913da9e249dbe36, 0xda652d9b78a64c68,
-	0x4c27a97f3bc334ef, 0x76621220e66b17f4, 0x967743899acd7d0b, 0xf3ee5bcae0ed6782,
-	0x409f753600c879fc, 0x06d09a39b5926db6, 0x6f83aeb0317ac588, 0x01e6ca4a86381f21,
-	0x66ff3462d19f3025, 0x72207c24ddfd3bfb, 0x4af6b6d3e2ece2eb, 0x9c994dbec7ea08de,
-	0x49ace597b09a8bc4, 0xb38c4766cf0797ba, 0x131b9373c57c2a75, 0xb1822cce61931e58,
-	0x9d7555b909ba1c0c, 0x127fafdd937d11d2, 0x29da3badc66d92e4, 0xa2c1d57154c2ecbc,
-	0x58c5134d82f6fe24, 0x1c3ae3515b62274f, 0xe907c82e01cb8126, 0xf8ed091913e37fcb,
-	0x3249d8f9c80046c9, 0x80cf9bede388fb63, 0x1881539a116cf19e, 0x5103f3f76bd52457,
-	0x15b7e6f5ae47f7a8, 0xdbd7c6ded47e9ccf, 0x44e55c410228bb1a, 0xb647d4255edb4e99,
-	0x5d11882bb8aafc30, 0xf5098bbb29d3212a, 0x8fb5ea14e90296b3, 0x677b942157dd025a,
-	0xfb58e7c0a390acb5, 0x89d3674c83bd4a01, 0x9e2da4df4bf3b93b, 0xfcc41e328cab4829,
-	0x03f38c96ba582c52, 0xcad1bdbd7fd85db2, 0xbbb442c16082ae83, 0xb95fe86ba5da9ab0,
-	0xb22e04673771a93f, 0x845358c9493152d8, 0xbe2a488697b4541e, 0x95a2dc2dd38e6966,
-	0xc02c11ac923c852b, 0x2388b1990df2a87b, 0x7c8008fa1b4f37be, 0x1f70d0c84d54e503,
-	0x5490adec7ece57d4, 0x002b3c27d9063a3a, 0x7eaea3848030a2bf, 0xc602326ded2003c0,
-	0x83a7287d69a94086, 0xc57a5fcb30f57a8a, 0xb56844e479ebe779, 0xa373b40f05dcbce9,
-	0xd71a786e88570ee2, 0x879cbacdbde8f6a0, 0x976ad1bcc164a32f, 0xab21e25e9666d78b,
-	0x901063aae5e5c33c, 0x9818b34448698d90, 0xe36487ae3e1e8abb, 0xafbdf931893bdcb4,
-	0x6345a0dc5fbbd519, 0x8628fe269b9465ca, 0x1e5d01603f9c51ec, 0x4de44006a15049b7,
-	0xbf6c70e5f776cbb1, 0x411218f2ef552bed, 0xcb0c0708705a36a3, 0xe74d14754f986044,
-	0xcd56d9430ea8280e, 0xc12591d7535f5065, 0xc83223f1720aef96, 0xc3a0396f7363a51f,
-}
-
-Tiger_Context :: struct {
-	a:      u64,
-	b:      u64,
-	c:      u64,
-	x:      [64]byte,
-	nx:     int,
-	length: u64,
-	ver:    int,
-}
-
-round :: #force_inline proc "contextless" (a, b, c, x, mul: u64) -> (u64, u64, u64) {
-	a, b, c := a, b, c
-	c ~= x
-	a -= T1[c & 0xff] ~ T2[(c >> 16) & 0xff] ~ T3[(c >> 32) & 0xff] ~ T4[(c >> 48) & 0xff]
-	b += T4[(c >> 8) & 0xff] ~ T3[(c >> 24) & 0xff] ~ T2[(c >> 40) & 0xff] ~ T1[(c >> 56) & 0xff]
-	b *= mul
-	return a, b, c
-}
-
-pass :: #force_inline proc "contextless" (a, b, c: u64, d: []u64, mul: u64) -> (x, y, z: u64) {
-	x, y, z = round(a, b, c, d[0], mul)
-	y, z, x = round(y, z, x, d[1], mul)
-	z, x, y = round(z, x, y, d[2], mul)
-	x, y, z = round(x, y, z, d[3], mul)
-	y, z, x = round(y, z, x, d[4], mul)
-	z, x, y = round(z, x, y, d[5], mul)
-	x, y, z = round(x, y, z, d[6], mul)
-	y, z, x = round(y, z, x, d[7], mul)
-	return
-}
-
-key_schedule :: #force_inline proc "contextless" (x: []u64) {
-	x[0] -= x[7] ~ 0xa5a5a5a5a5a5a5a5
-	x[1] ~= x[0]
-	x[2] += x[1]
-	x[3] -= x[2] ~ ((~x[1]) << 19)
-	x[4] ~= x[3]
-	x[5] += x[4]
-	x[6] -= x[5] ~ ((~x[4]) >> 23)
-	x[7] ~= x[6]
-	x[0] += x[7]
-	x[1] -= x[0] ~ ((~x[7]) << 19)
-	x[2] ~= x[1]
-	x[3] += x[2]
-	x[4] -= x[3] ~ ((~x[2]) >> 23)
-	x[5] ~= x[4]
-	x[6] += x[5]
-	x[7] -= x[6] ~ 0x0123456789abcdef
-}
-
-compress :: #force_inline proc "contextless" (ctx: ^Tiger_Context, data: []byte) {
-	a := ctx.a
-	b := ctx.b
-	c := ctx.c
-	x := util.cast_slice([]u64, data)
-	ctx.a, ctx.b, ctx.c = pass(ctx.a, ctx.b, ctx.c, x, 5)
-	key_schedule(x)
-	ctx.c, ctx.a, ctx.b = pass(ctx.c, ctx.a, ctx.b, x, 7)
-	key_schedule(x)
-	ctx.b, ctx.c, ctx.a = pass(ctx.b, ctx.c, ctx.a, x, 9)
-	ctx.a ~= a
-	ctx.b -= b
-	ctx.c += c
-}
-
-init :: proc "contextless" (ctx: ^Tiger_Context) {
-	ctx.a = 0x0123456789abcdef
-	ctx.b = 0xfedcba9876543210
-	ctx.c = 0xf096a5b4c3b2e187
-}
-
-update :: proc(ctx: ^Tiger_Context, input: []byte) {
-	p := make([]byte, len(input))
-	copy(p, input)
-
-	length     := len(p)
-	ctx.length += u64(length)
-	if ctx.nx > 0 {
-		n := len(p)
-		if n > 64 - ctx.nx {
-			n = 64 - ctx.nx
-		}
-		copy(ctx.x[ctx.nx:ctx.nx + n], p[:n])
-		ctx.nx += n
-		if ctx.nx == 64 {
-			compress(ctx, ctx.x[:64])
-			ctx.nx = 0
-		}
-		p = p[n:]
-	}
-	for len(p) >= 64 {
-		compress(ctx, p[:64])
-		p = p[64:]
-	}
-	if len(p) > 0 {
-		ctx.nx = copy(ctx.x[:], p)
-	}
-}
-
-final :: proc(ctx: ^Tiger_Context, hash: []byte) {
-	length := ctx.length
-	tmp: [64]byte
-	if ctx.ver == 1 {
-		tmp[0] = 0x01
-	} else {
-		tmp[0] = 0x80
-	}
-
-	size := length & 0x3f
-	if size < 56 {
-		update(ctx, tmp[:56 - size])
-	} else {
-		update(ctx, tmp[:64 + 56 - size])
-	}
-
-	length <<= 3
-	for i := uint(0); i < 8; i += 1 {
-		tmp[i] = byte(length >> (8 * i))
-	}
-	update(ctx, tmp[:8])
-
-	for i := uint(0); i < 8; i += 1 {
-		tmp[i]      = byte(ctx.a >> (8 * i))
-		tmp[i + 8]  = byte(ctx.b >> (8 * i))
-		tmp[i + 16] = byte(ctx.c >> (8 * i))
-	}
-	copy(hash[:], tmp[:len(hash)])
-}

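The removed Tiger code above follows the same init/update/final streaming shape as the packages that remain; `update` copies its input because `key_schedule` mutates the message block in place during `compress`. A minimal sketch of driving the deleted low-level API, assuming the pre-removal import path and package name (the 24-byte digest follows from `final` serializing a, b, and c):

    import "core:crypto/_tiger" // removed by this commit

    tiger_example :: proc() -> (digest: [24]byte) {
    	ctx: _tiger.Tiger_Context
    	_tiger.init(&ctx)
    	msg := "abc"
    	_tiger.update(&ctx, transmute([]byte)msg) // update copies msg internally
    	_tiger.final(&ctx, digest[:])             // 192-bit digest: a, b, c little-endian
    	return
    }
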
+ 0 - 726
core/crypto/blake/blake.odin

@@ -1,726 +0,0 @@
-package blake
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the BLAKE hashing algorithm, as defined in <https://web.archive.org/web/20190915215948/https://131002.net/blake>
-*/
-
-import "core:os"
-import "core:io"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_224 :: 28
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_384 :: 48
-DIGEST_SIZE_512 :: 64
-
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc "contextless" (data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224(transmute([]byte)(data))
-}
-
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Blake256_Context
-    ctx.is224 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Blake256_Context
-    ctx.is224 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Blake256_Context
-    ctx.is224 = true
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-    hash_stream_224,
-    hash_file_224,
-    hash_bytes_224,
-    hash_string_224,
-    hash_bytes_to_buffer_224,
-    hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc "contextless" (data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Blake256_Context
-    ctx.is224 = false
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Blake256_Context
-    ctx.is224 = false
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Blake256_Context
-    ctx.is224 = false
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc "contextless" (data: string) -> [DIGEST_SIZE_384]byte {
-    return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Blake512_Context
-    ctx.is384 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: Blake512_Context
-    ctx.is384 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Blake512_Context
-    ctx.is384 = true
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-    if !load_at_once {
-        return hash_stream_384(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_384(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-    hash_stream_384,
-    hash_file_384,
-    hash_bytes_384,
-    hash_string_384,
-    hash_bytes_to_buffer_384,
-    hash_string_to_buffer_384,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc "contextless" (data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Blake512_Context
-    ctx.is384 = false
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: Blake512_Context
-    ctx.is384 = false
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Blake512_Context
-    ctx.is384 = false
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc "contextless" (ctx: ^$T) {
-    when T == Blake256_Context {
-        if ctx.is224 {
-            ctx.h[0] = 0xc1059ed8
-            ctx.h[1] = 0x367cd507
-            ctx.h[2] = 0x3070dd17
-            ctx.h[3] = 0xf70e5939
-            ctx.h[4] = 0xffc00b31
-            ctx.h[5] = 0x68581511
-            ctx.h[6] = 0x64f98fa7
-            ctx.h[7] = 0xbefa4fa4
-        } else {
-            ctx.h[0] = 0x6a09e667
-            ctx.h[1] = 0xbb67ae85
-            ctx.h[2] = 0x3c6ef372
-            ctx.h[3] = 0xa54ff53a
-            ctx.h[4] = 0x510e527f
-            ctx.h[5] = 0x9b05688c
-            ctx.h[6] = 0x1f83d9ab
-            ctx.h[7] = 0x5be0cd19
-        }
-    } else when T == Blake512_Context {
-        if ctx.is384 {
-            ctx.h[0] = 0xcbbb9d5dc1059ed8
-            ctx.h[1] = 0x629a292a367cd507
-            ctx.h[2] = 0x9159015a3070dd17
-            ctx.h[3] = 0x152fecd8f70e5939
-            ctx.h[4] = 0x67332667ffc00b31
-            ctx.h[5] = 0x8eb44a8768581511
-            ctx.h[6] = 0xdb0c2e0d64f98fa7
-            ctx.h[7] = 0x47b5481dbefa4fa4
-        } else {
-            ctx.h[0] = 0x6a09e667f3bcc908
-            ctx.h[1] = 0xbb67ae8584caa73b
-            ctx.h[2] = 0x3c6ef372fe94f82b
-            ctx.h[3] = 0xa54ff53a5f1d36f1
-            ctx.h[4] = 0x510e527fade682d1
-            ctx.h[5] = 0x9b05688c2b3e6c1f
-            ctx.h[6] = 0x1f83d9abfb41bd6b
-            ctx.h[7] = 0x5be0cd19137e2179
-        }
-    }
-}
-
-update :: proc "contextless" (ctx: ^$T, data: []byte) {
-    data := data
-    when T == Blake256_Context {
-        if ctx.nx > 0 {
-            n := copy(ctx.x[ctx.nx:], data)
-            ctx.nx += n
-            if ctx.nx == BLOCKSIZE_256 {
-                block256(ctx, ctx.x[:])
-                ctx.nx = 0
-            }
-            data = data[n:]
-        }
-        if len(data) >= BLOCKSIZE_256 {
-            n := len(data) &~ (BLOCKSIZE_256 - 1)
-            block256(ctx, data[:n])
-            data = data[n:]
-        }
-        if len(data) > 0 {
-            ctx.nx = copy(ctx.x[:], data)
-        }
-    } else when T == Blake512_Context {
-        if ctx.nx > 0 {
-            n := copy(ctx.x[ctx.nx:], data)
-            ctx.nx += n
-            if ctx.nx == BLOCKSIZE_512 {
-                block512(ctx, ctx.x[:])
-                ctx.nx = 0
-            }
-            data = data[n:]
-        }
-        if len(data) >= BLOCKSIZE_512 {
-            n := len(data) &~ (BLOCKSIZE_512 - 1)
-            block512(ctx, data[:n])
-            data = data[n:]
-        }
-        if len(data) > 0 {
-            ctx.nx = copy(ctx.x[:], data)
-        }
-    }
-}
-
-final :: proc "contextless" (ctx: ^$T, hash: []byte) {
-    when T == Blake256_Context {
-        tmp: [65]byte
-    } else when T == Blake512_Context {
-        tmp: [129]byte
-    }
-    nx     := u64(ctx.nx)
-    tmp[0]  = 0x80
-    length := (ctx.t + nx) << 3
-
-    when T == Blake256_Context {
-        if nx == 55 {
-            if ctx.is224 {
-                write_additional(ctx, {0x80})
-            } else {
-                write_additional(ctx, {0x81})
-            }
-        } else {
-            if nx < 55 {
-                if nx == 0 {
-                    ctx.nullt = true
-                }
-                write_additional(ctx, tmp[0 : 55 - nx])
-            } else { 
-                write_additional(ctx, tmp[0 : 64 - nx])
-                write_additional(ctx, tmp[1:56])
-                ctx.nullt = true
-            }
-            if ctx.is224 {
-                write_additional(ctx, {0x00})
-            } else {
-                write_additional(ctx, {0x01})
-            }
-        }
-
-        for i : uint = 0; i < 8; i += 1 {
-            tmp[i] = byte(length >> (56 - 8 * i))
-        }
-        write_additional(ctx, tmp[0:8])
-
-        h := ctx.h[:]
-        if ctx.is224 {
-            h = h[0:7]
-        }
-        for s, i in h {
-            hash[i * 4]     = byte(s >> 24)
-            hash[i * 4 + 1] = byte(s >> 16)
-            hash[i * 4 + 2] = byte(s >> 8)
-            hash[i * 4 + 3] = byte(s)
-        }
-    } else when T == Blake512_Context {
-        if nx == 111 {
-            if ctx.is384 {
-                write_additional(ctx, {0x80})
-            } else {
-                write_additional(ctx, {0x81})
-            }
-        } else {
-            if nx < 111 {
-                if nx == 0 {
-                    ctx.nullt = true
-                }
-                write_additional(ctx, tmp[0 : 111 - nx])
-            } else { 
-                write_additional(ctx, tmp[0 : 128 - nx])
-                write_additional(ctx, tmp[1:112])
-                ctx.nullt = true
-            }
-            if ctx.is384 {
-                write_additional(ctx, {0x00})
-            } else {
-                write_additional(ctx, {0x01})
-            }
-        }
-
-        for i : uint = 0; i < 16; i += 1 {
-            tmp[i] = byte(length >> (120 - 8 * i))
-        }
-        write_additional(ctx, tmp[0:16])
-
-        h := ctx.h[:]
-        if ctx.is384 {
-            h = h[0:6]
-        }
-        for s, i in h {
-            hash[i * 8]     = byte(s >> 56)
-            hash[i * 8 + 1] = byte(s >> 48)
-            hash[i * 8 + 2] = byte(s >> 40)
-            hash[i * 8 + 3] = byte(s >> 32)
-            hash[i * 8 + 4] = byte(s >> 24)
-            hash[i * 8 + 5] = byte(s >> 16)
-            hash[i * 8 + 6] = byte(s >> 8)
-            hash[i * 8 + 7] = byte(s)
-        }
-    }
-}
-
-SIZE_224 :: 28
-SIZE_256 :: 32
-SIZE_384 :: 48
-SIZE_512 :: 64
-BLOCKSIZE_256 :: 64
-BLOCKSIZE_512 :: 128
-
-Blake256_Context :: struct {
-    h:     [8]u32,
-    s:     [4]u32,
-    t:     u64,
-    x:     [64]byte,
-    nx:    int,
-    is224: bool,
-    nullt: bool,
-}
-
-Blake512_Context :: struct {
-    h:     [8]u64,
-    s:     [4]u64,
-    t:     u64,
-    x:     [128]byte,
-    nx:    int,
-    is384: bool,
-    nullt: bool,
-}
-
-SIGMA := [?]int {
-    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
-    14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
-    11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
-    7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
-    9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
-    2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
-    12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
-    13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
-    6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
-    10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
-}
-
-U256 := [16]u32 {
-    0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
-    0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
-    0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
-    0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
-}
-
-U512 := [16]u64 {
-    0x243f6a8885a308d3, 0x13198a2e03707344, 0xa4093822299f31d0, 0x082efa98ec4e6c89,
-    0x452821e638d01377, 0xbe5466cf34e90c6c, 0xc0ac29b7c97c50dd, 0x3f84d5b5b5470917,
-    0x9216d5d98979fb1b, 0xd1310ba698dfb5ac, 0x2ffd72dbd01adfb7, 0xb8e1afed6a267e96,
-    0xba7c9045f12c7f99, 0x24a19947b3916cf7, 0x0801f2e2858efc16, 0x636920d871574e69,
-}
-
-G256 :: #force_inline proc "contextless" (a, b, c, d: u32, m: [16]u32, i, j: int) -> (u32, u32, u32, u32) {
-    a, b, c, d := a, b, c, d
-    a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
-    a += b
-    d ~= a
-    d = d << (32 - 16) | d >> 16
-    c += d
-    b ~= c
-    b = b << (32 - 12) | b >> 12
-    a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U256[SIGMA[(i % 10) * 16 + (2 * j)]]
-    a += b
-    d ~= a
-    d = d << (32 - 8) | d >> 8
-    c += d
-    b ~= c
-    b = b << (32 - 7) | b >> 7
-    return a, b, c, d
-}
-
-G512 :: #force_inline proc "contextless" (a, b, c, d: u64, m: [16]u64, i, j: int) -> (u64, u64, u64, u64) {
-    a, b, c, d := a, b, c, d
-    a += m[SIGMA[(i % 10) * 16 + (2 * j)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j + 1)]]
-    a += b
-    d ~= a
-    d = d << (64 - 32) | d >> 32
-    c += d
-    b ~= c
-    b = b << (64 - 25) | b >> 25
-    a += m[SIGMA[(i % 10) * 16 + (2 * j + 1)]] ~ U512[SIGMA[(i % 10) * 16 + (2 * j)]]
-    a += b
-    d ~= a
-    d = d << (64 - 16) | d >> 16
-    c += d
-    b ~= c
-    b = b << (64 - 11) | b >> 11
-    return a, b, c, d
-}
-
-block256 :: proc "contextless" (ctx: ^Blake256_Context, p: []byte) #no_bounds_check {
-    i, j: int     = ---, ---
-    v, m: [16]u32 = ---, ---
-    p := p
-    for len(p) >= BLOCKSIZE_256 {
-        v[0]  = ctx.h[0]
-        v[1]  = ctx.h[1]
-        v[2]  = ctx.h[2]
-        v[3]  = ctx.h[3]
-        v[4]  = ctx.h[4]
-        v[5]  = ctx.h[5]
-        v[6]  = ctx.h[6]
-        v[7]  = ctx.h[7]
-        v[8]  = ctx.s[0] ~ U256[0]
-        v[9]  = ctx.s[1] ~ U256[1]
-        v[10] = ctx.s[2] ~ U256[2]
-        v[11] = ctx.s[3] ~ U256[3]
-        v[12] = U256[4]
-        v[13] = U256[5]
-        v[14] = U256[6]
-        v[15] = U256[7]
-
-        ctx.t += 512
-        if !ctx.nullt {
-            v[12] ~= u32(ctx.t)
-            v[13] ~= u32(ctx.t)
-            v[14] ~= u32(ctx.t >> 32)
-            v[15] ~= u32(ctx.t >> 32)
-        }
-
-        for i, j = 0, 0; i < 16; i, j = i+1, j+4 {
-            m[i] = u32(p[j]) << 24 | u32(p[j + 1]) << 16 | u32(p[j + 2]) << 8 | u32(p[j + 3])
-        }
-
-        for i = 0; i < 14; i += 1 {
-            v[0], v[4], v[8],  v[12] = G256(v[0], v[4], v[8],  v[12], m, i, 0)
-            v[1], v[5], v[9],  v[13] = G256(v[1], v[5], v[9],  v[13], m, i, 1)
-            v[2], v[6], v[10], v[14] = G256(v[2], v[6], v[10], v[14], m, i, 2)
-            v[3], v[7], v[11], v[15] = G256(v[3], v[7], v[11], v[15], m, i, 3)
-            v[0], v[5], v[10], v[15] = G256(v[0], v[5], v[10], v[15], m, i, 4)
-            v[1], v[6], v[11], v[12] = G256(v[1], v[6], v[11], v[12], m, i, 5)
-            v[2], v[7], v[8],  v[13] = G256(v[2], v[7], v[8],  v[13], m, i, 6)
-            v[3], v[4], v[9],  v[14] = G256(v[3], v[4], v[9],  v[14], m, i, 7)
-        }
-
-        for i = 0; i < 8; i += 1 {
-            ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
-        }
-        p = p[BLOCKSIZE_256:]
-    }
-}
-
-block512 :: proc "contextless" (ctx: ^Blake512_Context, p: []byte) #no_bounds_check {
-    i, j: int     = ---, ---
-    v, m: [16]u64 = ---, ---
-    p := p
-    for len(p) >= BLOCKSIZE_512 {
-        v[0]  = ctx.h[0]
-        v[1]  = ctx.h[1]
-        v[2]  = ctx.h[2]
-        v[3]  = ctx.h[3]
-        v[4]  = ctx.h[4]
-        v[5]  = ctx.h[5]
-        v[6]  = ctx.h[6]
-        v[7]  = ctx.h[7]
-        v[8]  = ctx.s[0] ~ U512[0]
-        v[9]  = ctx.s[1] ~ U512[1]
-        v[10] = ctx.s[2] ~ U512[2]
-        v[11] = ctx.s[3] ~ U512[3]
-        v[12] = U512[4]
-        v[13] = U512[5]
-        v[14] = U512[6]
-        v[15] = U512[7]
-
-        ctx.t += 1024
-        if !ctx.nullt {
-            v[12] ~= ctx.t
-            v[13] ~= ctx.t
-            v[14] ~= 0
-            v[15] ~= 0
-        }
-
-        for i, j = 0, 0; i < 16; i, j = i + 1, j + 8 {
-            m[i] = u64(p[j]) << 56     | u64(p[j + 1]) << 48 | u64(p[j + 2]) << 40 | u64(p[j + 3]) << 32 | 
-                   u64(p[j + 4]) << 24 | u64(p[j + 5]) << 16 | u64(p[j + 6]) << 8  | u64(p[j + 7])
-        }
-        for i = 0; i < 16; i += 1 {
-            v[0], v[4], v[8],  v[12] = G512(v[0], v[4], v[8],  v[12], m, i, 0)
-            v[1], v[5], v[9],  v[13] = G512(v[1], v[5], v[9],  v[13], m, i, 1)
-            v[2], v[6], v[10], v[14] = G512(v[2], v[6], v[10], v[14], m, i, 2)
-            v[3], v[7], v[11], v[15] = G512(v[3], v[7], v[11], v[15], m, i, 3)
-            v[0], v[5], v[10], v[15] = G512(v[0], v[5], v[10], v[15], m, i, 4)
-            v[1], v[6], v[11], v[12] = G512(v[1], v[6], v[11], v[12], m, i, 5)
-            v[2], v[7], v[8],  v[13] = G512(v[2], v[7], v[8],  v[13], m, i, 6)
-            v[3], v[4], v[9],  v[14] = G512(v[3], v[4], v[9],  v[14], m, i, 7)
-        }
-
-        for i = 0; i < 8; i += 1 {
-            ctx.h[i] ~= ctx.s[i % 4] ~ v[i] ~ v[i + 8]
-        }
-        p = p[BLOCKSIZE_512:]
-    }
-}
-
-write_additional :: proc "contextless" (ctx: ^$T, data: []byte) {
-    ctx.t -= u64(len(data)) << 3
-    update(ctx, data)
-}

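The deleted BLAKE package exposed the same convenience overloads (string/bytes/stream/file plus the `hash_224`..`hash_512` proc groups) as the packages that survive this commit. A minimal sketch against the removed API, assuming the pre-removal import path:

    import "core:crypto/blake" // removed by this commit

    blake_example :: proc() {
    	digest := blake.hash_string_256("hello") // [32]byte
    	_ = digest
    }
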
+ 61 - 61
core/crypto/blake2b/blake2b.odin

@@ -7,12 +7,12 @@ package blake2b
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
 
-    Interface for the BLAKE2B hashing algorithm.
-    BLAKE2B and BLAKE2B share the implementation in the _blake2 package.
+    Interface for the BLAKE2b hashing algorithm.
+    BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
 */
 
-import "core:os"
 import "core:io"
+import "core:os"
 
 import "../_blake2"
 
@@ -25,103 +25,103 @@ DIGEST_SIZE :: 64
 // hash_string will hash the given input and return the
 // computed hash
 hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
+	return hash_bytes(transmute([]byte)(data))
 }
 
 // hash_bytes will hash the given input and return the
 // computed hash
 hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: _blake2.Blake2b_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2B_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    _blake2.update(&ctx, data)
-    _blake2.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2B_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: _blake2.Blake2b_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2B_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    _blake2.update(&ctx, data)
-    _blake2.final(&ctx, hash)
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2B_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
-
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: _blake2.Blake2b_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2B_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _blake2.update(&ctx, buf[:read])
-        } 
-    }
-    _blake2.final(&ctx, hash[:])
-    return hash, true 
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2B_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file will read the file provided by the given handle
 // and compute a hash
 hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
+	if !load_at_once {
+		return hash_stream(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE]byte{}, false
 }
 
 hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
 }
 
 /*
     Low level API
 */
 
-Blake2b_Context :: _blake2.Blake2b_Context
+Context :: _blake2.Blake2b_Context
 
-init :: proc(ctx: ^_blake2.Blake2b_Context) {
-    _blake2.init(ctx)
+init :: proc(ctx: ^Context) {
+	_blake2.init(ctx)
 }
 
-update :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, data: []byte) {
-    _blake2.update(ctx, data)
+update :: proc(ctx: ^Context, data: []byte) {
+	_blake2.update(ctx, data)
 }
 
-final :: proc "contextless" (ctx: ^_blake2.Blake2b_Context, hash: []byte) {
-    _blake2.final(ctx, hash)
+final :: proc(ctx: ^Context, hash: []byte) {
+	_blake2.final(ctx, hash)
 }

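The hunk above renames the exported context type to `Context` and routes the convenience procs through the package-local `init`/`update`/`final` wrappers. A minimal usage sketch of the renamed API as it stands after this commit:

    import "core:crypto/blake2b"

    blake2b_example :: proc() -> (digest: [blake2b.DIGEST_SIZE]byte) {
    	ctx: blake2b.Context // previously blake2b.Blake2b_Context
    	blake2b.init(&ctx)
    	msg := "hello"
    	blake2b.update(&ctx, transmute([]byte)msg)
    	blake2b.final(&ctx, digest[:])
    	return
    }
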
+ 61 - 61
core/crypto/blake2s/blake2s.odin

@@ -7,12 +7,12 @@ package blake2s
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
 
-    Interface for the BLAKE2S hashing algorithm.
-    BLAKE2B and BLAKE2B share the implementation in the _blake2 package.
+    Interface for the BLAKE2s hashing algorithm.
+    BLAKE2s and BLAKE2b share the implementation in the _blake2 package.
 */
 
-import "core:os"
 import "core:io"
+import "core:os"
 
 import "../_blake2"
 
@@ -25,103 +25,103 @@ DIGEST_SIZE :: 32
 // hash_string will hash the given input and return the
 // computed hash
 hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
+	return hash_bytes(transmute([]byte)(data))
 }
 
 // hash_bytes will hash the given input and return the
 // computed hash
 hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: _blake2.Blake2s_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2S_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    _blake2.update(&ctx, data)
-    _blake2.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2S_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
-
 // hash_string_to_buffer will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: _blake2.Blake2s_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2S_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    _blake2.update(&ctx, data)
-    _blake2.final(&ctx, hash)
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2S_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: _blake2.Blake2s_Context
-    cfg: _blake2.Blake2_Config
-    cfg.size = _blake2.BLAKE2S_SIZE
-    ctx.cfg  = cfg
-    _blake2.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _blake2.update(&ctx, buf[:read])
-        } 
-    }
-    _blake2.final(&ctx, hash[:])
-    return hash, true 
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	cfg: _blake2.Blake2_Config
+	cfg.size = _blake2.BLAKE2S_SIZE
+	ctx.cfg = cfg
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file will read the file provided by the given handle
 // and compute a hash
 hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
+	if !load_at_once {
+		return hash_stream(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE]byte{}, false
}
 
 hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
 }
 
 /*
    Low level API
 */
 
-Blake2s_Context :: _blake2.Blake2b_Context
+Context :: _blake2.Blake2s_Context
 
-init :: proc(ctx: ^_blake2.Blake2s_Context) {
-    _blake2.init(ctx)
+init :: proc(ctx: ^Context) {
+	_blake2.init(ctx)
 }
 
-update :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, data: []byte) {
-    _blake2.update(ctx, data)
+update :: proc(ctx: ^Context, data: []byte) {
+	_blake2.update(ctx, data)
 }
 
-final :: proc "contextless" (ctx: ^_blake2.Blake2s_Context, hash: []byte) {
-    _blake2.final(ctx, hash)
+final :: proc(ctx: ^Context, hash: []byte) {
	_blake2.final(ctx, hash)
 }

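One behavioral fix rides along in the blake2s hunk above: the old alias pointed the package at the BLAKE2b context type. Side by side:

    Blake2s_Context :: _blake2.Blake2b_Context // before: the *2b* context, a bug
    Context         :: _blake2.Blake2s_Context // after: the correct 2s context
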
+ 135 - 180
core/crypto/chacha20/chacha20.odin

@@ -1,6 +1,6 @@
 package chacha20
 
-import "core:crypto/util"
+import "core:encoding/endian"
 import "core:math/bits"
 import "core:mem"
 
@@ -60,23 +60,23 @@ init :: proc (ctx: ^Context, key, nonce: []byte) {
 	ctx._s[1] = _SIGMA_1
 	ctx._s[2] = _SIGMA_2
 	ctx._s[3] = _SIGMA_3
-	ctx._s[4] = util.U32_LE(k[0:4])
-	ctx._s[5] = util.U32_LE(k[4:8])
-	ctx._s[6] = util.U32_LE(k[8:12])
-	ctx._s[7] = util.U32_LE(k[12:16])
-	ctx._s[8] = util.U32_LE(k[16:20])
-	ctx._s[9] = util.U32_LE(k[20:24])
-	ctx._s[10] = util.U32_LE(k[24:28])
-	ctx._s[11] = util.U32_LE(k[28:32])
+	ctx._s[4] = endian.unchecked_get_u32le(k[0:4])
+	ctx._s[5] = endian.unchecked_get_u32le(k[4:8])
+	ctx._s[6] = endian.unchecked_get_u32le(k[8:12])
+	ctx._s[7] = endian.unchecked_get_u32le(k[12:16])
+	ctx._s[8] = endian.unchecked_get_u32le(k[16:20])
+	ctx._s[9] = endian.unchecked_get_u32le(k[20:24])
+	ctx._s[10] = endian.unchecked_get_u32le(k[24:28])
+	ctx._s[11] = endian.unchecked_get_u32le(k[28:32])
 	ctx._s[12] = 0
 	if !is_xchacha {
-		ctx._s[13] = util.U32_LE(n[0:4])
-		ctx._s[14] = util.U32_LE(n[4:8])
-		ctx._s[15] = util.U32_LE(n[8:12])
+		ctx._s[13] = endian.unchecked_get_u32le(n[0:4])
+		ctx._s[14] = endian.unchecked_get_u32le(n[4:8])
+		ctx._s[15] = endian.unchecked_get_u32le(n[8:12])
 	} else {
 		ctx._s[13] = 0
-		ctx._s[14] = util.U32_LE(n[0:4])
-		ctx._s[15] = util.U32_LE(n[4:8])
+		ctx._s[14] = endian.unchecked_get_u32le(n[0:4])
+		ctx._s[15] = endian.unchecked_get_u32le(n[4:8])
 
 		// The sub-key is stored in the keystream buffer.  While
 		// this will be overwritten in most circumstances, explicitly
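
`endian.unchecked_get_u32le` is a drop-in replacement for the removed `util.U32_LE`: both decode four bytes as a little-endian u32, without a length check ("unchecked"). A standalone sketch of the decode direction (the proc name is illustrative, not part of the package):

    import "core:encoding/endian"

    le_example :: proc() {
    	b := [4]byte{0x01, 0x02, 0x03, 0x04}
    	v := endian.unchecked_get_u32le(b[:]) // lowest-addressed byte is least significant
    	assert(v == 0x04030201)
    }
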
@@ -221,114 +221,114 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
 			// quarterround(x, 0, 4, 8, 12)
 			x0 += x4
 			x12 ~= x0
-			x12 = util.ROTL32(x12, 16)
+			x12 = bits.rotate_left32(x12, 16)
 			x8 += x12
 			x4 ~= x8
-			x4 = util.ROTL32(x4, 12)
+			x4 = bits.rotate_left32(x4, 12)
 			x0 += x4
 			x12 ~= x0
-			x12 = util.ROTL32(x12, 8)
+			x12 = bits.rotate_left32(x12, 8)
 			x8 += x12
 			x4 ~= x8
-			x4 = util.ROTL32(x4, 7)
+			x4 = bits.rotate_left32(x4, 7)
 
 			// quarterround(x, 1, 5, 9, 13)
 			x1 += x5
 			x13 ~= x1
-			x13 = util.ROTL32(x13, 16)
+			x13 = bits.rotate_left32(x13, 16)
 			x9 += x13
 			x5 ~= x9
-			x5 = util.ROTL32(x5, 12)
+			x5 = bits.rotate_left32(x5, 12)
 			x1 += x5
 			x13 ~= x1
-			x13 = util.ROTL32(x13, 8)
+			x13 = bits.rotate_left32(x13, 8)
 			x9 += x13
 			x5 ~= x9
-			x5 = util.ROTL32(x5, 7)
+			x5 = bits.rotate_left32(x5, 7)
 
 			// quarterround(x, 2, 6, 10, 14)
 			x2 += x6
 			x14 ~= x2
-			x14 = util.ROTL32(x14, 16)
+			x14 = bits.rotate_left32(x14, 16)
 			x10 += x14
 			x6 ~= x10
-			x6 = util.ROTL32(x6, 12)
+			x6 = bits.rotate_left32(x6, 12)
 			x2 += x6
 			x14 ~= x2
-			x14 = util.ROTL32(x14, 8)
+			x14 = bits.rotate_left32(x14, 8)
 			x10 += x14
 			x6 ~= x10
-			x6 = util.ROTL32(x6, 7)
+			x6 = bits.rotate_left32(x6, 7)
 
 			// quarterround(x, 3, 7, 11, 15)
 			x3 += x7
 			x15 ~= x3
-			x15 = util.ROTL32(x15, 16)
+			x15 = bits.rotate_left32(x15, 16)
 			x11 += x15
 			x7 ~= x11
-			x7 = util.ROTL32(x7, 12)
+			x7 = bits.rotate_left32(x7, 12)
 			x3 += x7
 			x15 ~= x3
-			x15 = util.ROTL32(x15, 8)
+			x15 = bits.rotate_left32(x15, 8)
 			x11 += x15
 			x7 ~= x11
-			x7 = util.ROTL32(x7, 7)
+			x7 = bits.rotate_left32(x7, 7)
 
 			// quarterround(x, 0, 5, 10, 15)
 			x0 += x5
 			x15 ~= x0
-			x15 = util.ROTL32(x15, 16)
+			x15 = bits.rotate_left32(x15, 16)
 			x10 += x15
 			x5 ~= x10
-			x5 = util.ROTL32(x5, 12)
+			x5 = bits.rotate_left32(x5, 12)
 			x0 += x5
 			x15 ~= x0
-			x15 = util.ROTL32(x15, 8)
+			x15 = bits.rotate_left32(x15, 8)
 			x10 += x15
 			x5 ~= x10
-			x5 = util.ROTL32(x5, 7)
+			x5 = bits.rotate_left32(x5, 7)
 
 			// quarterround(x, 1, 6, 11, 12)
 			x1 += x6
 			x12 ~= x1
-			x12 = util.ROTL32(x12, 16)
+			x12 = bits.rotate_left32(x12, 16)
 			x11 += x12
 			x6 ~= x11
-			x6 = util.ROTL32(x6, 12)
+			x6 = bits.rotate_left32(x6, 12)
 			x1 += x6
 			x12 ~= x1
-			x12 = util.ROTL32(x12, 8)
+			x12 = bits.rotate_left32(x12, 8)
 			x11 += x12
 			x6 ~= x11
-			x6 = util.ROTL32(x6, 7)
+			x6 = bits.rotate_left32(x6, 7)
 
 			// quarterround(x, 2, 7, 8, 13)
 			x2 += x7
 			x13 ~= x2
-			x13 = util.ROTL32(x13, 16)
+			x13 = bits.rotate_left32(x13, 16)
 			x8 += x13
 			x7 ~= x8
-			x7 = util.ROTL32(x7, 12)
+			x7 = bits.rotate_left32(x7, 12)
 			x2 += x7
 			x13 ~= x2
-			x13 = util.ROTL32(x13, 8)
+			x13 = bits.rotate_left32(x13, 8)
 			x8 += x13
 			x7 ~= x8
-			x7 = util.ROTL32(x7, 7)
+			x7 = bits.rotate_left32(x7, 7)
 
 			// quarterround(x, 3, 4, 9, 14)
 			x3 += x4
 			x14 ~= x3
-			x14 = util.ROTL32(x14, 16)
+			x14 = bits.rotate_left32(x14, 16)
 			x9 += x14
 			x4 ~= x9
-			x4 = util.ROTL32(x4, 12)
+			x4 = bits.rotate_left32(x4, 12)
 			x3 += x4
 			x14 ~= x3
-			x14 = util.ROTL32(x14, 8)
+			x14 = bits.rotate_left32(x14, 8)
 			x9 += x14
 			x4 ~= x9
-			x4 = util.ROTL32(x4, 7)
+			x4 = bits.rotate_left32(x4, 7)
 		}
 
 		x0 += _SIGMA_0
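
Each unrolled group above is one ChaCha quarter round; the change is purely mechanical, swapping `util.ROTL32` for the `core:math/bits` rotates. The repeated pattern factors into a helper like the sketch below (`quarterround` is an illustrative name; the package keeps the rounds unrolled):

    import "core:math/bits"

    quarterround :: proc "contextless" (a, b, c, d: u32) -> (u32, u32, u32, u32) {
    	a, b, c, d := a, b, c, d
    	a += b; d ~= a; d = bits.rotate_left32(d, 16)
    	c += d; b ~= c; b = bits.rotate_left32(b, 12)
    	a += b; d ~= a; d = bits.rotate_left32(d, 8)
    	c += d; b ~= c; b = bits.rotate_left32(b, 7)
    	return a, b, c, d
    }
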
@@ -352,93 +352,48 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
 		// this is "use vector operations", support for that is currently
 		// a work in progress/to be designed.
 		//
-		// Until dedicated assembly can be written leverage the fact that
-		// the callers of this routine ensure that src/dst are valid.
+		// In the meantime:
+		// - The caller(s) ensure that src/dst are valid.
+		// - The compiler knows if the target is picky about alignment.
 
-		when ODIN_ARCH == .i386 || ODIN_ARCH == .amd64 {
-			// util.PUT_U32_LE/util.U32_LE are not required on little-endian
-			// systems that also happen to not be strict about aligned
-			// memory access.
-
-			dst_p := transmute(^[16]u32)(&dst[0])
+		#no_bounds_check {
 			if src != nil {
-				src_p := transmute(^[16]u32)(&src[0])
-				dst_p[0] = src_p[0] ~ x0
-				dst_p[1] = src_p[1] ~ x1
-				dst_p[2] = src_p[2] ~ x2
-				dst_p[3] = src_p[3] ~ x3
-				dst_p[4] = src_p[4] ~ x4
-				dst_p[5] = src_p[5] ~ x5
-				dst_p[6] = src_p[6] ~ x6
-				dst_p[7] = src_p[7] ~ x7
-				dst_p[8] = src_p[8] ~ x8
-				dst_p[9] = src_p[9] ~ x9
-				dst_p[10] = src_p[10] ~ x10
-				dst_p[11] = src_p[11] ~ x11
-				dst_p[12] = src_p[12] ~ x12
-				dst_p[13] = src_p[13] ~ x13
-				dst_p[14] = src_p[14] ~ x14
-				dst_p[15] = src_p[15] ~ x15
+				endian.unchecked_put_u32le(dst[0:4], endian.unchecked_get_u32le(src[0:4]) ~ x0)
+				endian.unchecked_put_u32le(dst[4:8], endian.unchecked_get_u32le(src[4:8]) ~ x1)
+				endian.unchecked_put_u32le(dst[8:12], endian.unchecked_get_u32le(src[8:12]) ~ x2)
+				endian.unchecked_put_u32le(dst[12:16], endian.unchecked_get_u32le(src[12:16]) ~ x3)
+				endian.unchecked_put_u32le(dst[16:20], endian.unchecked_get_u32le(src[16:20]) ~ x4)
+				endian.unchecked_put_u32le(dst[20:24], endian.unchecked_get_u32le(src[20:24]) ~ x5)
+				endian.unchecked_put_u32le(dst[24:28], endian.unchecked_get_u32le(src[24:28]) ~ x6)
+				endian.unchecked_put_u32le(dst[28:32], endian.unchecked_get_u32le(src[28:32]) ~ x7)
+				endian.unchecked_put_u32le(dst[32:36], endian.unchecked_get_u32le(src[32:36]) ~ x8)
+				endian.unchecked_put_u32le(dst[36:40], endian.unchecked_get_u32le(src[36:40]) ~ x9)
+				endian.unchecked_put_u32le(dst[40:44], endian.unchecked_get_u32le(src[40:44]) ~ x10)
+				endian.unchecked_put_u32le(dst[44:48], endian.unchecked_get_u32le(src[44:48]) ~ x11)
+				endian.unchecked_put_u32le(dst[48:52], endian.unchecked_get_u32le(src[48:52]) ~ x12)
+				endian.unchecked_put_u32le(dst[52:56], endian.unchecked_get_u32le(src[52:56]) ~ x13)
+				endian.unchecked_put_u32le(dst[56:60], endian.unchecked_get_u32le(src[56:60]) ~ x14)
+				endian.unchecked_put_u32le(dst[60:64], endian.unchecked_get_u32le(src[60:64]) ~ x15)
 				src = src[_BLOCK_SIZE:]
 			} else {
-				dst_p[0] = x0
-				dst_p[1] = x1
-				dst_p[2] = x2
-				dst_p[3] = x3
-				dst_p[4] = x4
-				dst_p[5] = x5
-				dst_p[6] = x6
-				dst_p[7] = x7
-				dst_p[8] = x8
-				dst_p[9] = x9
-				dst_p[10] = x10
-				dst_p[11] = x11
-				dst_p[12] = x12
-				dst_p[13] = x13
-				dst_p[14] = x14
-				dst_p[15] = x15
+				endian.unchecked_put_u32le(dst[0:4], x0)
+				endian.unchecked_put_u32le(dst[4:8], x1)
+				endian.unchecked_put_u32le(dst[8:12], x2)
+				endian.unchecked_put_u32le(dst[12:16], x3)
+				endian.unchecked_put_u32le(dst[16:20], x4)
+				endian.unchecked_put_u32le(dst[20:24], x5)
+				endian.unchecked_put_u32le(dst[24:28], x6)
+				endian.unchecked_put_u32le(dst[28:32], x7)
+				endian.unchecked_put_u32le(dst[32:36], x8)
+				endian.unchecked_put_u32le(dst[36:40], x9)
+				endian.unchecked_put_u32le(dst[40:44], x10)
+				endian.unchecked_put_u32le(dst[44:48], x11)
+				endian.unchecked_put_u32le(dst[48:52], x12)
+				endian.unchecked_put_u32le(dst[52:56], x13)
+				endian.unchecked_put_u32le(dst[56:60], x14)
+				endian.unchecked_put_u32le(dst[60:64], x15)
 			}
 			}
 			dst = dst[_BLOCK_SIZE:]
 			dst = dst[_BLOCK_SIZE:]
-		} else {
-			#no_bounds_check {
-				if src != nil {
-					util.PUT_U32_LE(dst[0:4], util.U32_LE(src[0:4]) ~ x0)
-					util.PUT_U32_LE(dst[4:8], util.U32_LE(src[4:8]) ~ x1)
-					util.PUT_U32_LE(dst[8:12], util.U32_LE(src[8:12]) ~ x2)
-					util.PUT_U32_LE(dst[12:16], util.U32_LE(src[12:16]) ~ x3)
-					util.PUT_U32_LE(dst[16:20], util.U32_LE(src[16:20]) ~ x4)
-					util.PUT_U32_LE(dst[20:24], util.U32_LE(src[20:24]) ~ x5)
-					util.PUT_U32_LE(dst[24:28], util.U32_LE(src[24:28]) ~ x6)
-					util.PUT_U32_LE(dst[28:32], util.U32_LE(src[28:32]) ~ x7)
-					util.PUT_U32_LE(dst[32:36], util.U32_LE(src[32:36]) ~ x8)
-					util.PUT_U32_LE(dst[36:40], util.U32_LE(src[36:40]) ~ x9)
-					util.PUT_U32_LE(dst[40:44], util.U32_LE(src[40:44]) ~ x10)
-					util.PUT_U32_LE(dst[44:48], util.U32_LE(src[44:48]) ~ x11)
-					util.PUT_U32_LE(dst[48:52], util.U32_LE(src[48:52]) ~ x12)
-					util.PUT_U32_LE(dst[52:56], util.U32_LE(src[52:56]) ~ x13)
-					util.PUT_U32_LE(dst[56:60], util.U32_LE(src[56:60]) ~ x14)
-					util.PUT_U32_LE(dst[60:64], util.U32_LE(src[60:64]) ~ x15)
-					src = src[_BLOCK_SIZE:]
-				} else {
-					util.PUT_U32_LE(dst[0:4], x0)
-					util.PUT_U32_LE(dst[4:8], x1)
-					util.PUT_U32_LE(dst[8:12], x2)
-					util.PUT_U32_LE(dst[12:16], x3)
-					util.PUT_U32_LE(dst[16:20], x4)
-					util.PUT_U32_LE(dst[20:24], x5)
-					util.PUT_U32_LE(dst[24:28], x6)
-					util.PUT_U32_LE(dst[28:32], x7)
-					util.PUT_U32_LE(dst[32:36], x8)
-					util.PUT_U32_LE(dst[36:40], x9)
-					util.PUT_U32_LE(dst[40:44], x10)
-					util.PUT_U32_LE(dst[44:48], x11)
-					util.PUT_U32_LE(dst[48:52], x12)
-					util.PUT_U32_LE(dst[52:56], x13)
-					util.PUT_U32_LE(dst[56:60], x14)
-					util.PUT_U32_LE(dst[60:64], x15)
-				}
-				dst = dst[_BLOCK_SIZE:]
-			}
 		}
 		}
 
 
 		// Increment the counter.  Overflow checking is done upon
 		// Increment the counter.  Overflow checking is done upon
@@ -451,141 +406,141 @@ _do_blocks :: proc (ctx: ^Context, dst, src: []byte, nr_blocks: int) {
 }
 }
 
 
 @(private)
 @(private)
-_hchacha20 :: proc (dst, key, nonce: []byte) {
+_hchacha20 :: proc "contextless" (dst, key, nonce: []byte) {
 	x0, x1, x2, x3 := _SIGMA_0, _SIGMA_1, _SIGMA_2, _SIGMA_3
-	x4 := util.U32_LE(key[0:4])
-	x5 := util.U32_LE(key[4:8])
-	x6 := util.U32_LE(key[8:12])
-	x7 := util.U32_LE(key[12:16])
-	x8 := util.U32_LE(key[16:20])
-	x9 := util.U32_LE(key[20:24])
-	x10 := util.U32_LE(key[24:28])
-	x11 := util.U32_LE(key[28:32])
-	x12 := util.U32_LE(nonce[0:4])
-	x13 := util.U32_LE(nonce[4:8])
-	x14 := util.U32_LE(nonce[8:12])
-	x15 := util.U32_LE(nonce[12:16])
+	x4 := endian.unchecked_get_u32le(key[0:4])
+	x5 := endian.unchecked_get_u32le(key[4:8])
+	x6 := endian.unchecked_get_u32le(key[8:12])
+	x7 := endian.unchecked_get_u32le(key[12:16])
+	x8 := endian.unchecked_get_u32le(key[16:20])
+	x9 := endian.unchecked_get_u32le(key[20:24])
+	x10 := endian.unchecked_get_u32le(key[24:28])
+	x11 := endian.unchecked_get_u32le(key[28:32])
+	x12 := endian.unchecked_get_u32le(nonce[0:4])
+	x13 := endian.unchecked_get_u32le(nonce[4:8])
+	x14 := endian.unchecked_get_u32le(nonce[8:12])
+	x15 := endian.unchecked_get_u32le(nonce[12:16])
 
 	for i := _ROUNDS; i > 0; i = i - 2 {
 		// quarterround(x, 0, 4, 8, 12)
 		x0 += x4
 		x12 ~= x0
-		x12 = util.ROTL32(x12, 16)
+		x12 = bits.rotate_left32(x12, 16)
 		x8 += x12
 		x4 ~= x8
-		x4 = util.ROTL32(x4, 12)
+		x4 = bits.rotate_left32(x4, 12)
 		x0 += x4
 		x12 ~= x0
-		x12 = util.ROTL32(x12, 8)
+		x12 = bits.rotate_left32(x12, 8)
 		x8 += x12
 		x4 ~= x8
-		x4 = util.ROTL32(x4, 7)
+		x4 = bits.rotate_left32(x4, 7)
 
 		// quarterround(x, 1, 5, 9, 13)
 		x1 += x5
 		x13 ~= x1
-		x13 = util.ROTL32(x13, 16)
+		x13 = bits.rotate_left32(x13, 16)
 		x9 += x13
 		x5 ~= x9
-		x5 = util.ROTL32(x5, 12)
+		x5 = bits.rotate_left32(x5, 12)
 		x1 += x5
 		x13 ~= x1
-		x13 = util.ROTL32(x13, 8)
+		x13 = bits.rotate_left32(x13, 8)
 		x9 += x13
 		x5 ~= x9
-		x5 = util.ROTL32(x5, 7)
+		x5 = bits.rotate_left32(x5, 7)
 
 		// quarterround(x, 2, 6, 10, 14)
 		x2 += x6
 		x14 ~= x2
-		x14 = util.ROTL32(x14, 16)
+		x14 = bits.rotate_left32(x14, 16)
 		x10 += x14
 		x6 ~= x10
-		x6 = util.ROTL32(x6, 12)
+		x6 = bits.rotate_left32(x6, 12)
 		x2 += x6
 		x14 ~= x2
-		x14 = util.ROTL32(x14, 8)
+		x14 = bits.rotate_left32(x14, 8)
 		x10 += x14
 		x6 ~= x10
-		x6 = util.ROTL32(x6, 7)
+		x6 = bits.rotate_left32(x6, 7)
 
 		// quarterround(x, 3, 7, 11, 15)
 		x3 += x7
 		x15 ~= x3
-		x15 = util.ROTL32(x15, 16)
+		x15 = bits.rotate_left32(x15, 16)
 		x11 += x15
 		x7 ~= x11
-		x7 = util.ROTL32(x7, 12)
+		x7 = bits.rotate_left32(x7, 12)
 		x3 += x7
 		x15 ~= x3
-		x15 = util.ROTL32(x15, 8)
+		x15 = bits.rotate_left32(x15, 8)
 		x11 += x15
 		x7 ~= x11
-		x7 = util.ROTL32(x7, 7)
+		x7 = bits.rotate_left32(x7, 7)
 
 		// quarterround(x, 0, 5, 10, 15)
 		x0 += x5
 		x15 ~= x0
-		x15 = util.ROTL32(x15, 16)
+		x15 = bits.rotate_left32(x15, 16)
 		x10 += x15
 		x5 ~= x10
-		x5 = util.ROTL32(x5, 12)
+		x5 = bits.rotate_left32(x5, 12)
 		x0 += x5
 		x15 ~= x0
-		x15 = util.ROTL32(x15, 8)
+		x15 = bits.rotate_left32(x15, 8)
 		x10 += x15
 		x5 ~= x10
-		x5 = util.ROTL32(x5, 7)
+		x5 = bits.rotate_left32(x5, 7)
 
 		// quarterround(x, 1, 6, 11, 12)
 		x1 += x6
 		x12 ~= x1
-		x12 = util.ROTL32(x12, 16)
+		x12 = bits.rotate_left32(x12, 16)
 		x11 += x12
 		x6 ~= x11
-		x6 = util.ROTL32(x6, 12)
+		x6 = bits.rotate_left32(x6, 12)
 		x1 += x6
 		x12 ~= x1
-		x12 = util.ROTL32(x12, 8)
+		x12 = bits.rotate_left32(x12, 8)
 		x11 += x12
 		x6 ~= x11
-		x6 = util.ROTL32(x6, 7)
+		x6 = bits.rotate_left32(x6, 7)
 
 		// quarterround(x, 2, 7, 8, 13)
 		x2 += x7
 		x13 ~= x2
-		x13 = util.ROTL32(x13, 16)
+		x13 = bits.rotate_left32(x13, 16)
 		x8 += x13
 		x7 ~= x8
-		x7 = util.ROTL32(x7, 12)
+		x7 = bits.rotate_left32(x7, 12)
 		x2 += x7
 		x13 ~= x2
-		x13 = util.ROTL32(x13, 8)
+		x13 = bits.rotate_left32(x13, 8)
 		x8 += x13
 		x7 ~= x8
-		x7 = util.ROTL32(x7, 7)
+		x7 = bits.rotate_left32(x7, 7)
 
 		// quarterround(x, 3, 4, 9, 14)
 		x3 += x4
 		x14 ~= x3
-		x14 = util.ROTL32(x14, 16)
+		x14 = bits.rotate_left32(x14, 16)
 		x9 += x14
 		x4 ~= x9
-		x4 = util.ROTL32(x4, 12)
+		x4 = bits.rotate_left32(x4, 12)
 		x3 += x4
 		x14 ~= x3
-		x14 = util.ROTL32(x14, 8)
+		x14 = bits.rotate_left32(x14, 8)
 		x9 += x14
 		x4 ~= x9
-		x4 = util.ROTL32(x4, 7)
+		x4 = bits.rotate_left32(x4, 7)
 	}
 
-	util.PUT_U32_LE(dst[0:4], x0)
-	util.PUT_U32_LE(dst[4:8], x1)
-	util.PUT_U32_LE(dst[8:12], x2)
-	util.PUT_U32_LE(dst[12:16], x3)
-	util.PUT_U32_LE(dst[16:20], x12)
-	util.PUT_U32_LE(dst[20:24], x13)
-	util.PUT_U32_LE(dst[24:28], x14)
-	util.PUT_U32_LE(dst[28:32], x15)
+	endian.unchecked_put_u32le(dst[0:4], x0)
+	endian.unchecked_put_u32le(dst[4:8], x1)
+	endian.unchecked_put_u32le(dst[8:12], x2)
+	endian.unchecked_put_u32le(dst[12:16], x3)
+	endian.unchecked_put_u32le(dst[16:20], x12)
+	endian.unchecked_put_u32le(dst[20:24], x13)
+	endian.unchecked_put_u32le(dst[24:28], x14)
+	endian.unchecked_put_u32le(dst[28:32], x15)
 }
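
Note on the refactor above: the unrolled arithmetic is the standard ChaCha20 quarter round (RFC 8439, section 2.1), now expressed via core:math/bits instead of the removed core:crypto/util. As a minimal, self-contained sketch (the quarter_round procedure and package name are illustrative, not part of core:crypto/chacha20):

package chacha20_sketch

import "core:math/bits"

// quarter_round is the ChaCha20 quarter round from RFC 8439. The loop bodies
// in the diff above are this exact sequence, manually inlined for each of the
// eight (a, b, c, d) index combinations of a double round.
quarter_round :: proc "contextless" (a, b, c, d: u32) -> (u32, u32, u32, u32) {
	a, b, c, d := a, b, c, d
	a += b; d ~= a; d = bits.rotate_left32(d, 16)
	c += d; b ~= c; b = bits.rotate_left32(b, 12)
	a += b; d ~= a; d = bits.rotate_left32(d, 8)
	c += d; b ~= c; b = bits.rotate_left32(b, 7)
	return a, b, c, d
}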

+ 5 - 5
core/crypto/chacha20poly1305/chacha20poly1305.odin

@@ -3,7 +3,7 @@ package chacha20poly1305
 import "core:crypto"
 import "core:crypto"
 import "core:crypto/chacha20"
 import "core:crypto/chacha20"
 import "core:crypto/poly1305"
 import "core:crypto/poly1305"
-import "core:crypto/util"
+import "core:encoding/endian"
 import "core:mem"
 import "core:mem"
 
 
 KEY_SIZE :: chacha20.KEY_SIZE
 KEY_SIZE :: chacha20.KEY_SIZE
@@ -87,8 +87,8 @@ encrypt :: proc (ciphertext, tag, key, nonce, aad, plaintext: []byte) {
 	// mac_data |= num_to_8_le_bytes(aad.length)
 	// mac_data |= num_to_8_le_bytes(aad.length)
 	// mac_data |= num_to_8_le_bytes(ciphertext.length)
 	// mac_data |= num_to_8_le_bytes(ciphertext.length)
 	l_buf := otk[0:16] // Reuse the scratch buffer.
 	l_buf := otk[0:16] // Reuse the scratch buffer.
-	util.PUT_U64_LE(l_buf[0:8], u64(aad_len))
-	util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len))
+	endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len))
+	endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len))
 	poly1305.update(&mac_ctx, l_buf)
 	poly1305.update(&mac_ctx, l_buf)
 
 
 	// tag = poly1305_mac(mac_data, otk)
 	// tag = poly1305_mac(mac_data, otk)
@@ -128,8 +128,8 @@ decrypt :: proc (plaintext, tag, key, nonce, aad, ciphertext: []byte) -> bool {
 	poly1305.update(&mac_ctx, ciphertext)
 	poly1305.update(&mac_ctx, ciphertext)
 	_update_mac_pad16(&mac_ctx, ciphertext_len)
 	_update_mac_pad16(&mac_ctx, ciphertext_len)
 	l_buf := otk[0:16] // Reuse the scratch buffer.
 	l_buf := otk[0:16] // Reuse the scratch buffer.
-	util.PUT_U64_LE(l_buf[0:8], u64(aad_len))
-	util.PUT_U64_LE(l_buf[8:16], u64(ciphertext_len))
+	endian.unchecked_put_u64le(l_buf[0:8], u64(aad_len))
+	endian.unchecked_put_u64le(l_buf[8:16], u64(ciphertext_len))
 	poly1305.update(&mac_ctx, l_buf)
 	poly1305.update(&mac_ctx, l_buf)
 
 
 	// tag = poly1305_mac(mac_data, otk)
 	// tag = poly1305_mac(mac_data, otk)
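
Note on the two hunks above: they only change how the trailing Poly1305 length block is serialized. Per RFC 8439, section 2.8, the MAC input ends with the AAD length and the ciphertext length, each encoded as a 64-bit little-endian integer. A minimal sketch under those assumptions (the length_block helper is hypothetical; the package writes into a reused scratch buffer instead):

package aead_sketch

import "core:encoding/endian"

// length_block returns the final 16-byte block of the Poly1305 MAC input:
// le64(len(aad)) || le64(len(ciphertext)), matching the hunks above.
length_block :: proc "contextless" (aad_len, ciphertext_len: int) -> (blk: [16]byte) {
	endian.unchecked_put_u64le(blk[0:8], u64(aad_len))
	endian.unchecked_put_u64le(blk[8:16], u64(ciphertext_len))
	return
}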

+ 0 - 382
core/crypto/gost/gost.odin

@@ -1,382 +0,0 @@
-package gost
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the GOST hashing algorithm, as defined in RFC 5831 <https://datatracker.ietf.org/doc/html/rfc5831>
-*/
-
-import "core:mem"
-import "core:os"
-import "core:io"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 32
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: Gost_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Gost_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: Gost_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc "contextless" (ctx: ^Gost_Context) {
-    sbox: [8][16]u32 = {
-        { 10, 4,  5,  6,  8,  1,  3,  7,  13, 12, 14, 0,  9,  2,  11, 15 },
-        { 5,  15, 4,  0,  2,  13, 11, 9,  1,  7,  6,  3,  12, 14, 10, 8  },
-        { 7,  15, 12, 14, 9,  4,  1,  0,  3,  11, 5,  2,  6,  10, 8,  13 },
-        { 4,  10, 7,  12, 0,  15, 2,  8,  14, 1,  6,  5,  13, 11, 9,  3  },
-        { 7,  6,  4,  11, 9,  12, 2,  10, 1,  8,  0,  14, 15, 13, 3,  5  },
-        { 7,  6,  2,  4,  13, 9,  15, 0,  10, 1,  5,  11, 8,  14, 12, 3  },
-        { 13, 14, 4,  1,  7,  0,  5,  10, 3,  12, 8,  15, 6,  2,  9,  11 },
-        { 1,  3,  10, 9,  5,  11, 4,  15, 8,  6,  7,  14, 13, 0,  2,  12 },
-    }
-
-    i := 0
-    for a := 0; a < 16; a += 1 {
-        ax := sbox[1][a] << 15
-        bx := sbox[3][a] << 23
-        cx := sbox[5][a]
-        cx = (cx >> 1) | (cx << 31)
-        dx := sbox[7][a] << 7
-        for b := 0; b < 16; b, i = b + 1, i + 1 {
-            SBOX_1[i] = ax | (sbox[0][b] << 11)
-            SBOX_2[i] = bx | (sbox[2][b] << 19)
-            SBOX_3[i] = cx | (sbox[4][b] << 27)
-            SBOX_4[i] = dx | (sbox[6][b] << 3)
-        }
-    }
-}
-
-update :: proc(ctx: ^Gost_Context, data: []byte) {
-    length := byte(len(data))
-    j: byte
-
-    i := ctx.partial_bytes
-    for i < 32 && j < length {
-        ctx.partial[i] = data[j]
-        i, j = i + 1, j + 1
-    }
-
-    if i < 32 {
-        ctx.partial_bytes = i
-        return
-    }
-    bytes(ctx, ctx.partial[:], 256)
-
-    for (j + 32) < length {
-        bytes(ctx, data[j:], 256)
-        j += 32
-    }
-
-    i = 0
-    for j < length {
-        ctx.partial[i] = data[j]
-        i, j = i + 1, j + 1
-    }
-    ctx.partial_bytes = i
-}
-
-final :: proc(ctx: ^Gost_Context, hash: []byte) {
-    if ctx.partial_bytes > 0 {
-        mem.set(&ctx.partial[ctx.partial_bytes], 0, 32 - int(ctx.partial_bytes))
-        bytes(ctx, ctx.partial[:], u32(ctx.partial_bytes) << 3)
-    }
-  
-    compress(ctx.hash[:], ctx.len[:])
-    compress(ctx.hash[:], ctx.sum[:])
-
-    for i, j := 0, 0; i < 8; i, j = i + 1, j + 4 {
-        hash[j]     = byte(ctx.hash[i])
-        hash[j + 1] = byte(ctx.hash[i] >> 8)
-        hash[j + 2] = byte(ctx.hash[i] >> 16)
-        hash[j + 3] = byte(ctx.hash[i] >> 24)
-    }
-}
-
-/*
-    GOST implementation
-*/
-
-Gost_Context :: struct {
-    sum:           [8]u32,
-    hash:          [8]u32,
-    len:           [8]u32,
-    partial:       [32]byte,
-    partial_bytes: byte,
-}
-
-SBOX_1: [256]u32
-SBOX_2: [256]u32
-SBOX_3: [256]u32
-SBOX_4: [256]u32
-
-ENCRYPT_ROUND :: #force_inline proc "contextless" (l, r, t, k1, k2: u32) -> (u32, u32, u32) {
-    l, r, t := l, r, t
-    t  = (k1) + r
-    l ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24]
-    t  = (k2) + l
-    r ~= SBOX_1[t & 0xff] ~ SBOX_2[(t >> 8) & 0xff] ~ SBOX_3[(t >> 16) & 0xff] ~ SBOX_4[t >> 24]
-    return l, r, t
-}
-
-ENCRYPT :: #force_inline proc "contextless" (a, b, c: u32, key: []u32) -> (l, r, t: u32) {
-    l, r, t = ENCRYPT_ROUND(a, b, c, key[0], key[1])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[0], key[1])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[2], key[3])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[4], key[5])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[6], key[7])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[7], key[6])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[5], key[4])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[3], key[2])
-    l, r, t = ENCRYPT_ROUND(l, r, t, key[1], key[0])
-    t = r
-    r = l
-    l = t
-    return
-}
-
-bytes :: proc(ctx: ^Gost_Context, buf: []byte, bits: u32) {
-    a, c: u32
-    m: [8]u32
-
-    for i, j := 0, 0; i < 8; i += 1 {
-        a = u32(buf[j]) | u32(buf[j + 1]) << 8 | u32(buf[j + 2]) << 16 | u32(buf[j + 3]) << 24
-        j += 4
-        m[i] = a
-        c = a + c + ctx.sum[i]
-        ctx.sum[i] = c
-        c = c < a ? 1 : 0
-    }
-
-    compress(ctx.hash[:], m[:])
-    ctx.len[0] += bits
-    if ctx.len[0] < bits {
-        ctx.len[1] += 1
-    }
-}
-
-compress :: proc(h, m: []u32) {
-    key, u, v, w, s: [8]u32
-
-    copy(u[:], h)
-    copy(v[:], m)
-
-    for i := 0; i < 8; i += 2 {
-        w[0] = u[0] ~ v[0]
-        w[1] = u[1] ~ v[1]
-        w[2] = u[2] ~ v[2]
-        w[3] = u[3] ~ v[3]
-        w[4] = u[4] ~ v[4]
-        w[5] = u[5] ~ v[5]
-        w[6] = u[6] ~ v[6]
-        w[7] = u[7] ~ v[7]
-
-        key[0] = (w[0] & 0x000000ff)       | (w[2] & 0x000000ff) <<  8 | (w[4] & 0x000000ff) << 16 | (w[6] & 0x000000ff) << 24
-        key[1] = (w[0] & 0x0000ff00) >>  8 | (w[2] & 0x0000ff00)       | (w[4] & 0x0000ff00) <<  8 | (w[6] & 0x0000ff00) << 16
-        key[2] = (w[0] & 0x00ff0000) >> 16 | (w[2] & 0x00ff0000) >>  8 | (w[4] & 0x00ff0000)       | (w[6] & 0x00ff0000) <<  8
-        key[3] = (w[0] & 0xff000000) >> 24 | (w[2] & 0xff000000) >> 16 | (w[4] & 0xff000000) >>  8 | (w[6] & 0xff000000)
-        key[4] = (w[1] & 0x000000ff)       | (w[3] & 0x000000ff) <<  8 | (w[5] & 0x000000ff) << 16 | (w[7] & 0x000000ff) << 24
-        key[5] = (w[1] & 0x0000ff00) >>  8 | (w[3] & 0x0000ff00)       | (w[5] & 0x0000ff00) <<  8 | (w[7] & 0x0000ff00) << 16
-        key[6] = (w[1] & 0x00ff0000) >> 16 | (w[3] & 0x00ff0000) >>  8 | (w[5] & 0x00ff0000)       | (w[7] & 0x00ff0000) <<  8
-        key[7] = (w[1] & 0xff000000) >> 24 | (w[3] & 0xff000000) >> 16 | (w[5] & 0xff000000) >>  8 | (w[7] & 0xff000000)
-
-        r := h[i]
-        l := h[i + 1]
-        t: u32
-        l, r, t = ENCRYPT(l, r, 0, key[:])
-
-        s[i] = r
-        s[i + 1] = l
-
-        if i == 6 {
-            break
-        }
-
-        l    = u[0] ~ u[2]
-        r    = u[1] ~ u[3]
-        u[0] = u[2]
-        u[1] = u[3]
-        u[2] = u[4]
-        u[3] = u[5]
-        u[4] = u[6]
-        u[5] = u[7]
-        u[6] = l
-        u[7] = r
-
-        if i == 2 {
-            u[0] ~= 0xff00ff00
-            u[1] ~= 0xff00ff00
-            u[2] ~= 0x00ff00ff
-            u[3] ~= 0x00ff00ff
-            u[4] ~= 0x00ffff00
-            u[5] ~= 0xff0000ff
-            u[6] ~= 0x000000ff
-            u[7] ~= 0xff00ffff
-        }
-
-        l    = v[0]
-        r    = v[2]
-        v[0] = v[4]
-        v[2] = v[6]
-        v[4] = l ~ r
-        v[6] = v[0] ~ r
-        l    = v[1]
-        r    = v[3]
-        v[1] = v[5]
-        v[3] = v[7]
-        v[5] = l ~ r
-        v[7] = v[1] ~ r
-    }
-
-    u[0] = m[0] ~ s[6]
-    u[1] = m[1] ~ s[7]
-    u[2] = m[2] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff) ~ 
-        (s[1] & 0xffff) ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[6] ~ (s[6] << 16) ~
-        (s[7] & 0xffff0000) ~ (s[7] >> 16)
-    u[3] = m[3] ~ (s[0] & 0xffff) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~
-        (s[1] << 16) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~
-        (s[3] << 16) ~ s[6] ~ (s[6] << 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~
-        (s[7] << 16) ~ (s[7] >> 16)
-    u[4] = m[4] ~
-        (s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[0] >> 16) ~
-        (s[1] & 0xffff0000) ~ (s[1] >> 16) ~ (s[2] << 16) ~ (s[2] >> 16) ~
-        (s[3] << 16) ~ (s[3] >> 16) ~ (s[4] << 16) ~ (s[6] << 16) ~
-        (s[6] >> 16) ~(s[7] & 0xffff) ~ (s[7] << 16) ~ (s[7] >> 16)
-    u[5] = m[5] ~ (s[0] << 16) ~ (s[0] >> 16) ~ (s[0] & 0xffff0000) ~
-        (s[1] & 0xffff) ~ s[2] ~ (s[2] >> 16) ~ (s[3] << 16) ~ (s[3] >> 16) ~
-        (s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~  (s[6] << 16) ~
-        (s[6] >> 16) ~ (s[7] & 0xffff0000) ~ (s[7] << 16) ~ (s[7] >> 16)
-    u[6] = m[6] ~ s[0] ~ (s[1] >> 16) ~ (s[2] << 16) ~ s[3] ~ (s[3] >> 16) ~
-        (s[4] << 16) ~ (s[4] >> 16) ~ (s[5] << 16) ~ (s[5] >> 16) ~ s[6] ~
-        (s[6] << 16) ~ (s[6] >> 16) ~ (s[7] << 16)
-    u[7] = m[7] ~ (s[0] & 0xffff0000) ~ (s[0] << 16) ~ (s[1] & 0xffff) ~
-        (s[1] << 16) ~ (s[2] >> 16) ~ (s[3] << 16) ~ s[4] ~ (s[4] >> 16) ~
-        (s[5] << 16) ~ (s[5] >> 16) ~ (s[6] >> 16) ~ (s[7] & 0xffff) ~
-        (s[7] << 16) ~ (s[7] >> 16)
-
-    v[0] = h[0] ~ (u[1] << 16) ~ (u[0] >> 16)
-    v[1] = h[1] ~ (u[2] << 16) ~ (u[1] >> 16)
-    v[2] = h[2] ~ (u[3] << 16) ~ (u[2] >> 16)
-    v[3] = h[3] ~ (u[4] << 16) ~ (u[3] >> 16)
-    v[4] = h[4] ~ (u[5] << 16) ~ (u[4] >> 16)
-    v[5] = h[5] ~ (u[6] << 16) ~ (u[5] >> 16)
-    v[6] = h[6] ~ (u[7] << 16) ~ (u[6] >> 16)
-    v[7] = h[7] ~ (u[0] & 0xffff0000) ~ (u[0] << 16) ~ (u[7] >> 16) ~ (u[1] & 0xffff0000) ~ (u[1] << 16) ~ (u[6] << 16) ~ (u[7] & 0xffff0000)
-
-    h[0] = (v[0] & 0xffff0000) ~ (v[0] << 16) ~ (v[0] >> 16) ~ (v[1] >> 16) ~
-        (v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ (v[4] << 16) ~
-        (v[5] >> 16) ~ v[5] ~ (v[6] >> 16) ~ (v[7] << 16) ~ (v[7] >> 16) ~
-        (v[7] & 0xffff)
-    h[1] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~ (v[1] & 0xffff) ~
-        v[2] ~ (v[2] >> 16) ~ (v[3] << 16) ~ (v[4] >> 16) ~ (v[5] << 16) ~
-        (v[6] << 16) ~ v[6] ~ (v[7] & 0xffff0000) ~ (v[7] >> 16)
-    h[2] = (v[0] & 0xffff) ~ (v[0] << 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~
-        (v[1] & 0xffff0000) ~ (v[2] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~
-        (v[5] >> 16) ~ v[6] ~ (v[6] >> 16) ~ (v[7] & 0xffff) ~ (v[7] << 16) ~
-        (v[7] >> 16)
-    h[3] = (v[0] << 16) ~ (v[0] >> 16) ~ (v[0] & 0xffff0000) ~
-        (v[1] & 0xffff0000) ~ (v[1] >> 16) ~ (v[2] << 16) ~ (v[2] >> 16) ~ v[2] ~
-        (v[3] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~
-        (v[7] & 0xffff) ~ (v[7] >> 16)
-    h[4] = (v[0] >> 16) ~ (v[1] << 16) ~ v[1] ~ (v[2] >> 16) ~ v[2] ~
-        (v[3] << 16) ~ (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ (v[5] >> 16) ~
-        v[5] ~ (v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16)
-    h[5] = (v[0] << 16) ~ (v[0] & 0xffff0000) ~ (v[1] << 16) ~ (v[1] >> 16) ~
-        (v[1] & 0xffff0000) ~ (v[2] << 16) ~ v[2] ~ (v[3] >> 16) ~ v[3] ~
-        (v[4] << 16) ~ (v[4] >> 16) ~ v[4] ~ (v[5] << 16) ~ (v[6] << 16) ~
-        (v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ (v[7] >> 16) ~ (v[7] & 0xffff0000)
-    h[6] = v[0] ~ v[2] ~ (v[2] >> 16) ~ v[3] ~ (v[3] << 16) ~ v[4] ~
-        (v[4] >> 16) ~ (v[5] << 16) ~ (v[5] >> 16) ~ v[5] ~ (v[6] << 16) ~
-        (v[6] >> 16) ~ v[6] ~ (v[7] << 16) ~ v[7]
-    h[7] = v[0] ~ (v[0] >> 16) ~ (v[1] << 16) ~ (v[1] >> 16) ~ (v[2] << 16) ~
-        (v[3] >> 16) ~ v[3] ~ (v[4] << 16) ~ v[4] ~ (v[5] >> 16) ~ v[5] ~
-        (v[6] << 16) ~ (v[6] >> 16) ~ (v[7] << 16) ~ v[7]
-}
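
Note on the deletion above: gost.odin decoded little-endian words by hand in bytes (a = u32(buf[j]) | u32(buf[j + 1]) << 8 | ...). The packages that survive this commit spell the same decode with core:encoding/endian; a minimal sketch of the equivalence, with illustrative helper names:

package endian_sketch

import "core:encoding/endian"

// get_u32le_manual mirrors the hand-rolled decode from the deleted code.
get_u32le_manual :: proc "contextless" (buf: []byte) -> u32 {
	return u32(buf[0]) | u32(buf[1]) << 8 | u32(buf[2]) << 16 | u32(buf[3]) << 24
}

// get_u32le is the replacement idiom used elsewhere in this commit; both
// procedures return the same value for any buffer of at least 4 bytes.
get_u32le :: proc "contextless" (buf: []byte) -> u32 {
	return endian.unchecked_get_u32le(buf[0:4])
}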

+ 0 - 653
core/crypto/groestl/groestl.odin

@@ -1,653 +0,0 @@
-package groestl
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the GROESTL hashing algorithm, as defined in <http://www.groestl.info/Groestl.zip>
-*/
-
-import "core:os"
-import "core:io"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_224 :: 28
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_384 :: 48
-DIGEST_SIZE_512 :: 64
-
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224(transmute([]byte)(data))
-}
-
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Groestl_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-    hash_stream_224,
-    hash_file_224,
-    hash_bytes_224,
-    hash_string_224,
-    hash_bytes_to_buffer_224,
-    hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Groestl_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-    return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: Groestl_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-    if !load_at_once {
-        return hash_stream_384(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_384(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-    hash_stream_384,
-    hash_file_384,
-    hash_bytes_384,
-    hash_string_384,
-    hash_bytes_to_buffer_384,
-    hash_string_to_buffer_384,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: Groestl_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Groestl_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Groestl_Context) {
-    assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512")
-    if ctx.hashbitlen <= 256 {
-        ctx.rounds    = 10
-        ctx.columns   = 8
-        ctx.statesize = 64
-    } else {
-        ctx.rounds    = 14
-        ctx.columns   = 16
-        ctx.statesize = 128
-    }
-    for i := 8 - size_of(i32); i < 8; i += 1 {
-        ctx.chaining[i][ctx.columns - 1] = byte(ctx.hashbitlen >> (8 * (7 - uint(i))))
-    }
-}
-
-update :: proc(ctx: ^Groestl_Context, data: []byte) {
-    databitlen := len(data) * 8
-    msglen     := databitlen / 8
-    rem        := databitlen % 8
-
-    i: int
-    assert(ctx.bits_in_last_byte == 0)
-
-    if ctx.buf_ptr != 0 {
-        for i = 0; ctx.buf_ptr < ctx.statesize && i < msglen; i, ctx.buf_ptr =  i + 1, ctx.buf_ptr + 1 {
-            ctx.buffer[ctx.buf_ptr] = data[i]
-        }
-
-        if ctx.buf_ptr < ctx.statesize {
-            if rem != 0 {
-                ctx.bits_in_last_byte    = rem
-                ctx.buffer[ctx.buf_ptr]  = data[i]
-                ctx.buf_ptr             += 1
-            }
-            return
-        }
-
-        ctx.buf_ptr = 0
-        transform(ctx, ctx.buffer[:], u32(ctx.statesize))
-    }
-
-    transform(ctx, data[i:], u32(msglen - i))
-    i += ((msglen - i) / ctx.statesize) * ctx.statesize
-    for i < msglen {
-        ctx.buffer[ctx.buf_ptr] = data[i]
-        i, ctx.buf_ptr          = i + 1, ctx.buf_ptr + 1
-    }
-    
-    if rem != 0 {
-        ctx.bits_in_last_byte    = rem
-        ctx.buffer[ctx.buf_ptr]  = data[i]
-        ctx.buf_ptr             += 1
-    }
-}
-
-final :: proc(ctx: ^Groestl_Context, hash: []byte) {
-    hashbytelen := ctx.hashbitlen / 8
-
-    if ctx.bits_in_last_byte != 0 {
-        ctx.buffer[ctx.buf_ptr - 1] &= ((1 << uint(ctx.bits_in_last_byte)) - 1) << (8 - uint(ctx.bits_in_last_byte))
-        ctx.buffer[ctx.buf_ptr - 1] ~= 0x1 << (7 - uint(ctx.bits_in_last_byte))
-    } else {
-        ctx.buffer[ctx.buf_ptr]  = 0x80
-        ctx.buf_ptr             += 1
-    }
-
-    if ctx.buf_ptr > ctx.statesize - 8 {
-        for ctx.buf_ptr < ctx.statesize {
-            ctx.buffer[ctx.buf_ptr]  = 0
-            ctx.buf_ptr             += 1
-        }
-        transform(ctx, ctx.buffer[:], u32(ctx.statesize))
-        ctx.buf_ptr = 0
-    }
-
-    for ctx.buf_ptr < ctx.statesize - 8 {
-        ctx.buffer[ctx.buf_ptr]  = 0
-        ctx.buf_ptr             += 1
-    }
-
-    ctx.block_counter += 1
-    ctx.buf_ptr        = ctx.statesize
-
-    for ctx.buf_ptr > ctx.statesize - 8 {
-        ctx.buf_ptr              -= 1
-        ctx.buffer[ctx.buf_ptr]   = byte(ctx.block_counter)
-        ctx.block_counter       >>= 8
-    }
-
-    transform(ctx, ctx.buffer[:], u32(ctx.statesize))
-    output_transformation(ctx)
-
-    for i, j := ctx.statesize - hashbytelen , 0; i < ctx.statesize; i, j = i + 1, j + 1 {
-        hash[j] = ctx.chaining[i % 8][i / 8]
-    }
-}
-
-/*
-    GROESTL implementation
-*/
-
-SBOX := [256]byte {
-    0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
-    0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
-    0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
-    0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
-    0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc,
-    0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
-    0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a,
-    0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
-    0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0,
-    0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
-    0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
-    0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
-    0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85,
-    0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
-    0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5,
-    0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
-    0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17,
-    0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
-    0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88,
-    0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
-    0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c,
-    0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
-    0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9,
-    0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
-    0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6,
-    0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
-    0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e,
-    0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
-    0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94,
-    0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
-    0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68,
-    0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
-}
-
-SHIFT := [2][2][8]int {
-    {{0, 1, 2, 3, 4, 5, 6, 7},  {1, 3, 5, 7,  0, 2, 4, 6}},
-    {{0, 1, 2, 3, 4, 5, 6, 11}, {1, 3, 5, 11, 0, 2, 4, 6}},
-}
-
-Groestl_Context :: struct {
-    chaining:          [8][16]byte,
-    block_counter:     u64,
-    hashbitlen:        int,
-    buffer:            [128]byte,
-    buf_ptr:           int,
-    bits_in_last_byte: int,
-    columns:           int,
-    rounds:            int,
-    statesize:         int,
-}
-
-Groestl_Variant :: enum {
-    P512  = 0, 
-    Q512  = 1, 
-    P1024 = 2, 
-    Q1024 = 3,
-}
-
-MUL2 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return (b >> 7) != 0 ? (b << 1) ~ 0x1b : (b << 1)
-}
-
-MUL3 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return MUL2(b) ~ b
-}
-
-MUL4 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return MUL2(MUL2(b))
-}
-
-MUL5 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return MUL4(b) ~ b
-}
-
-MUL6 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return MUL4(b) ~ MUL2(b)
-}
-
-MUL7 :: #force_inline proc "contextless"(b: byte) -> byte {
-    return MUL4(b) ~ MUL2(b) ~ b
-}
-
-sub_bytes :: #force_inline proc (x: [][16]byte, columns: int) {
-    for i := 0; i < 8; i += 1 {
-        for j := 0; j < columns; j += 1 {
-            x[i][j] = SBOX[x[i][j]]
-        }
-    }
-}
-
-shift_bytes :: #force_inline proc (x: [][16]byte, columns: int, v: Groestl_Variant) {
-    temp: [16]byte
-    R := &SHIFT[int(v) / 2][int(v) & 1]
-
-    for i := 0; i < 8; i += 1 {
-        for j := 0; j < columns; j += 1 {
-            temp[j] = x[i][(j + R[i]) % columns]
-        }
-        for j := 0; j < columns; j += 1 {
-            x[i][j] = temp[j]
-        }
-    }
-}
-
-mix_bytes :: #force_inline proc (x: [][16]byte, columns: int) {
-    temp: [8]byte
-
-    for i := 0; i < columns; i += 1 {
-        for j := 0; j < 8; j += 1 {
-            temp[j] =  MUL2(x[(j + 0) % 8][i]) ~
-                       MUL2(x[(j + 1) % 8][i]) ~
-                       MUL3(x[(j + 2) % 8][i]) ~
-                       MUL4(x[(j + 3) % 8][i]) ~
-                       MUL5(x[(j + 4) % 8][i]) ~
-                       MUL3(x[(j + 5) % 8][i]) ~
-                       MUL5(x[(j + 6) % 8][i]) ~
-                       MUL7(x[(j + 7) % 8][i])
-        }
-        for j := 0; j < 8; j += 1 {
-            x[j][i] = temp[j]
-        }
-    }
-}
-
-p :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) {
-    v := ctx.columns == 8 ? Groestl_Variant.P512 : Groestl_Variant.P1024
-    for i := 0; i < ctx.rounds; i += 1 {
-        add_roundconstant(x, ctx.columns, byte(i), v)
-        sub_bytes(x, ctx.columns)
-        shift_bytes(x, ctx.columns, v)
-        mix_bytes(x, ctx.columns)
-    }
-}
-
-q :: #force_inline proc (ctx: ^Groestl_Context, x: [][16]byte) {
-    v := ctx.columns == 8 ? Groestl_Variant.Q512 : Groestl_Variant.Q1024
-    for i := 0; i < ctx.rounds; i += 1 {
-        add_roundconstant(x, ctx.columns, byte(i), v)
-        sub_bytes(x, ctx.columns)
-        shift_bytes(x, ctx.columns, v)
-        mix_bytes(x, ctx.columns)
-    }
-}
-
-transform :: proc(ctx: ^Groestl_Context, input: []byte, msglen: u32) {
-    tmp1, tmp2: [8][16]byte
-    input, msglen := input, msglen
-
-    for msglen >= u32(ctx.statesize) {
-        for i := 0; i < 8; i += 1 {
-            for j := 0; j < ctx.columns; j += 1 {
-                tmp1[i][j] = ctx.chaining[i][j] ~ input[j * 8 + i]
-                tmp2[i][j] = input[j * 8 + i]
-            }
-        }
-
-        p(ctx, tmp1[:])
-        q(ctx, tmp2[:])
-
-        for i := 0; i < 8; i += 1 {
-            for j := 0; j < ctx.columns; j += 1 {
-                ctx.chaining[i][j] ~= tmp1[i][j] ~ tmp2[i][j]
-            }
-        }
-
-        ctx.block_counter += 1
-        msglen            -= u32(ctx.statesize)
-        input              = input[ctx.statesize:]
-    }
-}
-
-output_transformation :: proc(ctx: ^Groestl_Context) {
-    temp: [8][16]byte
-
-    for i := 0; i < 8; i += 1 {
-        for j := 0; j < ctx.columns; j += 1 {
-            temp[i][j] = ctx.chaining[i][j]
-        }
-    }
-
-    p(ctx, temp[:])
-
-    for i := 0; i < 8; i += 1 {
-        for j := 0; j < ctx.columns; j += 1 {
-            ctx.chaining[i][j] ~= temp[i][j]
-        }
-    }
-}
-
-add_roundconstant :: proc(x: [][16]byte, columns: int, round: byte, v: Groestl_Variant) {
-    switch (i32(v) & 1) {
-        case 0: 
-            for i := 0; i < columns; i += 1 {
-                x[0][i] ~= byte(i << 4) ~ round
-            }
-        case 1:
-            for i := 0; i < columns; i += 1 {
-                for j := 0; j < 7; j += 1 {
-                    x[j][i] ~= 0xff
-                }
-            }
-            for i := 0; i < columns; i += 1 {
-                x[7][i] ~= byte(i << 4) ~ 0xff ~ round
-            }
-    }
-}
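
Note on the deletion above: the MUL2..MUL7 helpers in groestl.odin are multiplications in GF(2^8) with the AES reduction polynomial x^8 + x^4 + x^3 + x + 1, all built from doubling. A standalone sketch of that primitive (the xtime name is illustrative), for anyone cross-referencing the MixBytes math:

package gf256_sketch

// xtime doubles b in GF(2^8): shift left, and if the high bit overflowed,
// reduce by XORing in 0x1b (the low byte of 0x11b). This is the deleted MUL2;
// MUL3(b) = xtime(b) ~ b, MUL4(b) = xtime(xtime(b)), and so on.
xtime :: proc "contextless" (b: byte) -> byte {
	return (b >> 7) != 0 ? (b << 1) ~ 0x1b : (b << 1)
}

For example, xtime(0x80) = 0x1b: the shift overflows to 0x00 and the reduction XOR applies.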

+ 0 - 1814
core/crypto/haval/haval.odin

@@ -1,1814 +0,0 @@
-package haval
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation for the HAVAL hashing algorithm as defined in <https://web.archive.org/web/20150111210116/http://labs.calyptix.com/haval.php>
-*/
-
-import "core:mem"
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_160 :: 20
-DIGEST_SIZE_192 :: 24
-DIGEST_SIZE_224 :: 28
-DIGEST_SIZE_256 :: 32
-
-// hash_string_128_3 will hash the given input and return the
-// computed hash
-hash_string_128_3 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128_3(transmute([]byte)(data))
-}
-
-// hash_bytes_128_3 will hash the given input and return the
-// computed hash
-hash_bytes_128_3 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128_3 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128_3 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128_3(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128_3 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128_3 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_128_3 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 3
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128_3 will read the file provided by the given handle
-// and compute a hash
-hash_file_128_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128_3(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128_3(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128_3 :: proc {
-    hash_stream_128_3,
-    hash_file_128_3,
-    hash_bytes_128_3,
-    hash_string_128_3,
-    hash_bytes_to_buffer_128_3,
-    hash_string_to_buffer_128_3,
-}
-
-// hash_string_128_4 will hash the given input and return the
-// computed hash
-hash_string_128_4 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128_4(transmute([]byte)(data))
-}
-
-// hash_bytes_128_4 will hash the given input and return the
-// computed hash
-hash_bytes_128_4 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128_4 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128_4 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128_4(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128_4 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128_4 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_128_4 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 4
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128_4 will read the file provided by the given handle
-// and compute a hash
-hash_file_128_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128_4(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128_4(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128_4 :: proc {
-    hash_stream_128_4,
-    hash_file_128_4,
-    hash_bytes_128_4,
-    hash_string_128_4,
-    hash_bytes_to_buffer_128_4,
-    hash_string_to_buffer_128_4,
-}
-
-// hash_string_128_5 will hash the given input and return the
-// computed hash
-hash_string_128_5 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128_5(transmute([]byte)(data))
-}
-
-// hash_bytes_128_5 will hash the given input and return the
-// computed hash
-hash_bytes_128_5 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128_5 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128_5 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128_5(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128_5 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128_5 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_128_5 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 128
-    ctx.rounds = 5
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128_5 will read the file provided by the given handle
-// and compute a hash
-hash_file_128_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128_5(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128_5(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128_5 :: proc {
-    hash_stream_128_5,
-    hash_file_128_5,
-    hash_bytes_128_5,
-    hash_string_128_5,
-    hash_bytes_to_buffer_128_5,
-    hash_string_to_buffer_128_5,
-}
-
-// hash_string_160_3 will hash the given input and return the
-// computed hash
-hash_string_160_3 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160_3(transmute([]byte)(data))
-}
-
-// hash_bytes_160_3 will hash the given input and return the
-// computed hash
-hash_bytes_160_3 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160_3 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160_3 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160_3(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160_3 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160_3 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_160_3 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 3
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160_3 will read the file provided by the given handle
-// and compute a hash
-hash_file_160_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160_3(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160_3(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160_3 :: proc {
-    hash_stream_160_3,
-    hash_file_160_3,
-    hash_bytes_160_3,
-    hash_string_160_3,
-    hash_bytes_to_buffer_160_3,
-    hash_string_to_buffer_160_3,
-}
-
-// hash_string_160_4 will hash the given input and return the
-// computed hash
-hash_string_160_4 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160_4(transmute([]byte)(data))
-}
-
-// hash_bytes_160_4 will hash the given input and return the
-// computed hash
-hash_bytes_160_4 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160_4 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160_4 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160_4(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160_4 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160_4 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_160_4 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 4
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160_4 will read the file provided by the given handle
-// and compute a hash
-hash_file_160_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160_4(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160_4(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160_4 :: proc {
-    hash_stream_160_4,
-    hash_file_160_4,
-    hash_bytes_160_4,
-    hash_string_160_4,
-    hash_bytes_to_buffer_160_4,
-    hash_string_to_buffer_160_4,
-}
-
-// hash_string_160_5 will hash the given input and return the
-// computed hash
-hash_string_160_5 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160_5(transmute([]byte)(data))
-}
-
-// hash_bytes_160_5 will hash the given input and return the
-// computed hash
-hash_bytes_160_5 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160_5 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160_5 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160_5(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160_5 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160_5 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_160_5 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 160
-    ctx.rounds = 5
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160_5 will read the file provided by the given handle
-// and compute a hash
-hash_file_160_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160_5(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160_5(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160_5 :: proc {
-    hash_stream_160_5,
-    hash_file_160_5,
-    hash_bytes_160_5,
-    hash_string_160_5,
-    hash_bytes_to_buffer_160_5,
-    hash_string_to_buffer_160_5,
-}
-
-// hash_string_192_3 will hash the given input and return the
-// computed hash
-hash_string_192_3 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192_3(transmute([]byte)(data))
-}
-
-// hash_bytes_192_3 will hash the given input and return the
-// computed hash
-hash_bytes_192_3 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_192_3 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192_3 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192_3(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192_3 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192_3 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_192_3 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 3
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_192_3 will read the file provided by the given handle
-// and compute a hash
-hash_file_192_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192_3(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192_3(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192_3 :: proc {
-    hash_stream_192_3,
-    hash_file_192_3,
-    hash_bytes_192_3,
-    hash_string_192_3,
-    hash_bytes_to_buffer_192_3,
-    hash_string_to_buffer_192_3,
-}
-
-// hash_string_192_4 will hash the given input and return the
-// computed hash
-hash_string_192_4 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192_4(transmute([]byte)(data))
-}
-
-// hash_bytes_192_4 will hash the given input and return the
-// computed hash
-hash_bytes_192_4 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_192_4 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192_4 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192_4(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192_4 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192_4 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_192_4 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 4
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_192_4 will read the file provided by the given handle
-// and compute a hash
-hash_file_192_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192_4(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192_4(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192_4 :: proc {
-    hash_stream_192_4,
-    hash_file_192_4,
-    hash_bytes_192_4,
-    hash_string_192_4,
-    hash_bytes_to_buffer_192_4,
-    hash_string_to_buffer_192_4,
-}
-
-// hash_string_192_5 will hash the given input and return the
-// computed hash
-hash_string_192_5 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192_5(transmute([]byte)(data))
-}
-
-// hash_bytes_192_5 will hash the given input and return the
-// computed hash
-hash_bytes_192_5 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_192_5 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192_5 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192_5(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192_5 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192_5 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_192_5 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 192
-    ctx.rounds = 5
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_192_5 will read the file provided by the given handle
-// and compute a hash
-hash_file_192_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192_5(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192_5(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192_5 :: proc {
-    hash_stream_192_5,
-    hash_file_192_5,
-    hash_bytes_192_5,
-    hash_string_192_5,
-    hash_bytes_to_buffer_192_5,
-    hash_string_to_buffer_192_5,
-}
-
-// hash_string_224_3 will hash the given input and return the
-// computed hash
-hash_string_224_3 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224_3(transmute([]byte)(data))
-}
-
-// hash_bytes_224_3 will hash the given input and return the
-// computed hash
-hash_bytes_224_3 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224_3 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224_3 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224_3(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224_3 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224_3 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224_3 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 3
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224_3 will read the file provided by the given handle
-// and compute a hash
-hash_file_224_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224_3(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224_3(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224_3 :: proc {
-    hash_stream_224_3,
-    hash_file_224_3,
-    hash_bytes_224_3,
-    hash_string_224_3,
-    hash_bytes_to_buffer_224_3,
-    hash_string_to_buffer_224_3,
-}
-
-// hash_string_224_4 will hash the given input and return the
-// computed hash
-hash_string_224_4 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224_4(transmute([]byte)(data))
-}
-
-// hash_bytes_224_4 will hash the given input and return the
-// computed hash
-hash_bytes_224_4 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224_4 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224_4 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224_4(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224_4 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224_4 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224_4 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 4
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224_4 will read the file provided by the given handle
-// and compute a hash
-hash_file_224_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224_4(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224_4(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224_4 :: proc {
-    hash_stream_224_4,
-    hash_file_224_4,
-    hash_bytes_224_4,
-    hash_string_224_4,
-    hash_bytes_to_buffer_224_4,
-    hash_string_to_buffer_224_4,
-}
-
-// hash_string_224_5 will hash the given input and return the
-// computed hash
-hash_string_224_5 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224_5(transmute([]byte)(data))
-}
-
-// hash_bytes_224_5 will hash the given input and return the
-// computed hash
-hash_bytes_224_5 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224_5 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224_5 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224_5(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224_5 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224_5 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224_5 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 224
-    ctx.rounds = 5
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224_5 will read the file provided by the given handle
-// and compute a hash
-hash_file_224_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224_5(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224_5(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224_5 :: proc {
-    hash_stream_224_5,
-    hash_file_224_5,
-    hash_bytes_224_5,
-    hash_string_224_5,
-    hash_bytes_to_buffer_224_5,
-    hash_string_to_buffer_224_5,
-}
-
-// hash_string_256_3 will hash the given input and return the
-// computed hash
-hash_string_256_3 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256_3(transmute([]byte)(data))
-}
-
-// hash_bytes_256_3 will hash the given input and return the
-// computed hash
-hash_bytes_256_3 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256_3 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256_3 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256_3(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256_3 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256_3 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 3
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256_3 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 3
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256_3 will read the file provided by the given handle
-// and compute a hash
-hash_file_256_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256_3(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256_3(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256_3 :: proc {
-    hash_stream_256_3,
-    hash_file_256_3,
-    hash_bytes_256_3,
-    hash_string_256_3,
-    hash_bytes_to_buffer_256_3,
-    hash_string_to_buffer_256_3,
-}
-
-// hash_string_256_4 will hash the given input and return the
-// computed hash
-hash_string_256_4 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256_4(transmute([]byte)(data))
-}
-
-// hash_bytes_256_4 will hash the given input and return the
-// computed hash
-hash_bytes_256_4 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256_4 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256_4 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256_4(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256_4 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256_4 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 4
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256_4 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 4
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256_4 will read the file provided by the given handle
-// and compute a hash
-hash_file_256_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256_4(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256_4(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256_4 :: proc {
-    hash_stream_256_4,
-    hash_file_256_4,
-    hash_bytes_256_4,
-    hash_string_256_4,
-    hash_bytes_to_buffer_256_4,
-    hash_string_to_buffer_256_4,
-}
-
-// hash_string_256_5 will hash the given input and return the
-// computed hash
-hash_string_256_5 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256_5(transmute([]byte)(data))
-}
-
-// hash_bytes_256_5 will hash the given input and return the
-// computed hash
-hash_bytes_256_5 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256_5 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256_5 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256_5(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256_5 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256_5 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 5
-    init(&ctx)
-    ctx.str_len = u32(len(data))
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256_5 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Haval_Context
-    ctx.hashbitlen = 256
-    ctx.rounds = 5
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        ctx.str_len = u32(len(buf[:read]))
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256_5 will read the file provided by the given handle
-// and compute a hash
-hash_file_256_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256_5(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256_5(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256_5 :: proc {
-    hash_stream_256_5,
-    hash_file_256_5,
-    hash_bytes_256_5,
-    hash_string_256_5,
-    hash_bytes_to_buffer_256_5,
-    hash_string_to_buffer_256_5,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Haval_Context) {
-    assert(ctx.hashbitlen == 128 || ctx.hashbitlen == 160 || ctx.hashbitlen == 192 || ctx.hashbitlen == 224 || ctx.hashbitlen == 256, "hashbitlen must be set to 128, 160, 192, 224 or 256")
-    assert(ctx.rounds == 3 || ctx.rounds == 4 || ctx.rounds == 5, "rounds must be set to 3, 4 or 5")
-    // Initial fingerprint: the first eight 32-bit words of the fractional part of pi
-    ctx.fingerprint[0] = 0x243f6a88
-    ctx.fingerprint[1] = 0x85a308d3
-    ctx.fingerprint[2] = 0x13198a2e
-    ctx.fingerprint[3] = 0x03707344
-    ctx.fingerprint[4] = 0xa4093822
-    ctx.fingerprint[5] = 0x299f31d0
-    ctx.fingerprint[6] = 0x082efa98
-    ctx.fingerprint[7] = 0xec4e6c89
-}
-
-// @note(zh): Make sure to set ctx.str_len to the length of the input slice before calling this proc, e.g. ctx.str_len = u32(len(data))
-update :: proc(ctx: ^Haval_Context, data: []byte) {
-    i: u32
-    rmd_len  := u32((ctx.count[0] >> 3) & 0x7f)
-    fill_len := 128 - rmd_len
-    str_len  := ctx.str_len
-
-    ctx.count[0] += str_len << 3
-    if ctx.count[0] < (str_len << 3) {
-        ctx.count[1] += 1
-    }
-    ctx.count[1] += str_len >> 29
-
-    when ODIN_ENDIAN == .Little {
-        if rmd_len + str_len >= 128 {
-            copy(util.slice_to_bytes(ctx.block[:])[rmd_len:], data[:fill_len])
-            block(ctx, ctx.rounds)
-            for i = fill_len; i + 127 < str_len; i += 128 {
-                copy(util.slice_to_bytes(ctx.block[:]), data[i:i+128])
-                block(ctx, ctx.rounds)
-            }
-            rmd_len = 0
-        } else {
-            i = 0
-        }
-        copy(util.slice_to_bytes(ctx.block[:])[rmd_len:], data[i:])
-    } else {
-        if rmd_len + str_len >= 128 {
-            copy(ctx.remainder[rmd_len:], data[:fill_len])
-            CH2UINT(ctx.remainder[:], ctx.block[:])
-            block(ctx, ctx.rounds)
-            for i = fill_len; i + 127 < str_len; i += 128 {
-                copy(ctx.remainder[:], data[i:i+128])
-                CH2UINT(ctx.remainder[:], ctx.block[:])
-                block(ctx, ctx.rounds)
-            }
-            rmd_len = 0
-        } else {
-            i = 0
-        }
-        copy(ctx.remainder[rmd_len:], data[i:])
-    }
-}
-
-final :: proc(ctx: ^Haval_Context, hash: []byte) {
-    pad_len: u32
-    tail: [10]byte
-
-    tail[0] = byte(ctx.hashbitlen & 0x3) << 6 | byte(ctx.rounds & 0x7) << 3 | (VERSION & 0x7)
-    tail[1] = byte(ctx.hashbitlen >> 2) & 0xff
-
-    UINT2CH(ctx.count[:], tail[2:], 2)
-    rmd_len := (ctx.count[0] >> 3) & 0x7f
-    // Pad so that, once the 10-byte tail is appended, the total length is a
-    // multiple of the 128-byte block size (118 = 128 - 10, 246 = 256 - 10).
-    if rmd_len < 118 {
-        pad_len = 118 - rmd_len
-    } else {
-        pad_len = 246 - rmd_len
-    }
-
-    ctx.str_len = pad_len
-    update(ctx, PADDING[:])
-    ctx.str_len = 10
-    update(ctx, tail[:])
-    tailor(ctx, ctx.hashbitlen)
-    UINT2CH(ctx.fingerprint[:], hash, ctx.hashbitlen >> 5)
-
-    mem.set(ctx, 0, size_of(ctx^)) // size_of(ctx) would only zero the width of the pointer
-}
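All of the wrappers above reduce to the same init/update/final sequence; here is a sketch of driving the low-level API directly (statements belong inside a procedure; note that `update` consumes `ctx.str_len` rather than `len(data)`, per the note on `update` above):

    ctx: Haval_Context
    ctx.hashbitlen = 256          // one of 128, 160, 192, 224, 256
    ctx.rounds     = 5            // one of 3, 4, 5
    init(&ctx)

    msg := "abc"
    data := transmute([]byte)(msg)
    ctx.str_len = u32(len(data))  // update reads str_len, not len(data)
    update(&ctx, data)

    digest: [DIGEST_SIZE_256]byte
    final(&ctx, digest[:])        // final also zeroes the context afterwards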
-
-/*
-    HAVAL implementation
-*/
-
-VERSION :: 1
-
-Haval_Context :: struct {
-    count:       [2]u32,
-    fingerprint: [8]u32,
-    block:       [32]u32,
-    remainder:   [128]byte,
-    rounds:      u32,
-    hashbitlen:  u32,
-    str_len:     u32,
-}
-
-// HAVAL appends a single 0x01 byte (LSB-first bit ordering), then zeros.
-PADDING := [128]byte {
-   0x01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-   0,    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-}
-
-F_1 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 {
-    return ((x1) & ((x0) ~ (x4)) ~ (x2) & (x5) ~ (x3) & (x6) ~ (x0))
-}
-
-F_2 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 {
-    return ((x2) & ((x1) & ~(x3) ~ (x4) & (x5) ~ (x6) ~ (x0)) ~ (x4) & ((x1) ~ (x5)) ~ (x3) & (x5) ~ (x0))
-}
-
-F_3 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 {
-    return ((x3) & ((x1) & (x2) ~ (x6) ~ (x0)) ~ (x1) & (x4) ~ (x2) & (x5) ~ (x0))
-}
-
-F_4 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 {
-    return ((x4) & ((x5) & ~(x2) ~ (x3) & ~(x6) ~ (x1) ~ (x6) ~ (x0)) ~ (x3) & ((x1) & (x2) ~ (x5) ~ (x6)) ~ (x2) & (x6) ~ (x0))
-}
-
-F_5 :: #force_inline proc "contextless" (x6, x5, x4, x3, x2, x1, x0: u32) -> u32 {
-    return ((x0) & ((x1) & (x2) & (x3) ~ ~(x5)) ~ (x1) & (x4) ~ (x2) & (x5) ~ (x3) & (x6))
-}
-
-FPHI_1 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 {
-    switch rounds {
-        case 3: return F_1(x1, x0, x3, x5, x6, x2, x4)
-        case 4: return F_1(x2, x6, x1, x4, x5, x3, x0)
-        case 5: return F_1(x3, x4, x1, x0, x5, x2, x6)
-        case: panic("Rounds count not supported!")
-    }
-    return 0
-}
-
-FPHI_2 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 {
-    switch rounds {
-        case 3: return F_2(x4, x2, x1, x0, x5, x3, x6)
-        case 4: return F_2(x3, x5, x2, x0, x1, x6, x4)
-        case 5: return F_2(x6, x2, x1, x0, x3, x4, x5)
-        case: panic("Rounds count not supported!")
-    }
-    return 0
-}
-
-FPHI_3 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 {
-    switch rounds {
-        case 3: return F_3(x6, x1, x2, x3, x4, x5, x0)
-        case 4: return F_3(x1, x4, x3, x6, x0, x2, x5)
-        case 5: return F_3(x2, x6, x0, x4, x3, x1, x5)
-        case: panic("Rounds count not supported!")
-    }
-    return 0
-}
-
-FPHI_4 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 {
-    switch rounds {
-        case 4: return F_4(x6, x4, x0, x5, x2, x1, x3)
-        case 5: return F_4(x1, x5, x3, x2, x0, x4, x6)
-        case: panic("Rounds count not supported!")
-    }
-    return 0
-}
-
-FPHI_5 :: #force_inline proc(x6, x5, x4, x3, x2, x1, x0, rounds: u32) -> u32 {
-    switch rounds {
-        case 5: return F_5(x2, x5, x0, x6, x4, x3, x1)
-        case: panic("Rounds count not supported!")
-    }
-    return 0
-}
-
-FF_1 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, rounds: u32) -> u32 {
-    tmp := FPHI_1(x6, x5, x4, x3, x2, x1, x0, rounds)
-    x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w
-    return x8
-}
-
-FF_2 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 {
-    tmp := FPHI_2(x6, x5, x4, x3, x2, x1, x0, rounds)
-    x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c
-    return x8
-}
-
-FF_3 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 {
-    tmp := FPHI_3(x6, x5, x4, x3, x2, x1, x0, rounds)
-    x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c
-    return x8
-}
-
-FF_4 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 {
-    tmp := FPHI_4(x6, x5, x4, x3, x2, x1, x0, rounds)
-    x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c
-    return x8
-}
-
-FF_5 :: #force_inline proc(x7, x6, x5, x4, x3, x2, x1, x0, w, c, rounds: u32) -> u32 {
-    tmp := FPHI_5(x6, x5, x4, x3, x2, x1, x0, rounds)
-    x8 := util.ROTR32(tmp, 7) + util.ROTR32(x7, 11) + w + c
-    return x8
-}
-
-// CH2UINT loads one 128-byte block into 32 little-endian u32 words.
-// Note: it always processes exactly 32 words, so len(word) must be >= 32.
-CH2UINT :: #force_inline proc "contextless" (str: []byte, word: []u32) {
-    for _, i in word[:32] {
-        word[i] = u32(str[i*4+0]) << 0 | u32(str[i*4+1]) << 8 | u32(str[i*4+2]) << 16 | u32(str[i*4+3]) << 24
-    }
-}
-
-// UINT2CH stores the first wlen words back out as little-endian bytes.
-UINT2CH :: #force_inline proc "contextless" (word: []u32, str: []byte, wlen: u32) {
-    for _, i in word[:wlen] {
-        str[i*4+0] = byte(word[i] >> 0)  & 0xff
-        str[i*4+1] = byte(word[i] >> 8)  & 0xff
-        str[i*4+2] = byte(word[i] >> 16) & 0xff
-        str[i*4+3] = byte(word[i] >> 24) & 0xff
-    }
-}
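As a worked example of the byte order (a fragment, assuming it runs inside a procedure): the first four bytes 0x01 0x02 0x03 0x04 of a block load as the little-endian word 0x04030201, and UINT2CH writes them back unchanged:

    block_bytes: [128]byte
    block_bytes[0] = 0x01; block_bytes[1] = 0x02
    block_bytes[2] = 0x03; block_bytes[3] = 0x04

    words: [32]u32
    CH2UINT(block_bytes[:], words[:])     // words[0] == 0x04030201
    UINT2CH(words[:], block_bytes[:], 32) // restores the original byte layout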
-
-block :: proc(ctx: ^Haval_Context, rounds: u32) {
-    t0, t1, t2, t3 := ctx.fingerprint[0], ctx.fingerprint[1], ctx.fingerprint[2], ctx.fingerprint[3]
-    t4, t5, t6, t7 := ctx.fingerprint[4], ctx.fingerprint[5], ctx.fingerprint[6], ctx.fingerprint[7]
-    w := ctx.block
-
-    t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[ 0], rounds)
-    t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[ 1], rounds)
-    t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[ 2], rounds)
-    t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[ 3], rounds)
-    t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[ 4], rounds)
-    t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[ 5], rounds)
-    t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[ 6], rounds)
-    t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[ 7], rounds)
-
-    t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[ 8], rounds)
-    t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], rounds)
-    t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[10], rounds)
-    t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[11], rounds)
-    t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[12], rounds)
-    t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[13], rounds)
-    t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[14], rounds)
-    t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[15], rounds)
-
-    t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[16], rounds)
-    t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[17], rounds)
-    t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[18], rounds)
-    t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[19], rounds)
-    t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[20], rounds)
-    t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[21], rounds)
-    t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[22], rounds)
-    t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[23], rounds)
-
-    t7 = FF_1(t7, t6, t5, t4, t3, t2, t1, t0, w[24], rounds)
-    t6 = FF_1(t6, t5, t4, t3, t2, t1, t0, t7, w[25], rounds)
-    t5 = FF_1(t5, t4, t3, t2, t1, t0, t7, t6, w[26], rounds)
-    t4 = FF_1(t4, t3, t2, t1, t0, t7, t6, t5, w[27], rounds)
-    t3 = FF_1(t3, t2, t1, t0, t7, t6, t5, t4, w[28], rounds)
-    t2 = FF_1(t2, t1, t0, t7, t6, t5, t4, t3, w[29], rounds)
-    t1 = FF_1(t1, t0, t7, t6, t5, t4, t3, t2, w[30], rounds)
-    t0 = FF_1(t0, t7, t6, t5, t4, t3, t2, t1, w[31], rounds)
-
-    t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[ 5], 0x452821e6, rounds)
-    t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[14], 0x38d01377, rounds)
-    t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[26], 0xbe5466cf, rounds)
-    t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[18], 0x34e90c6c, rounds)
-    t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[11], 0xc0ac29b7, rounds)
-    t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[28], 0xc97c50dd, rounds)
-    t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[ 7], 0x3f84d5b5, rounds)
-    t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[16], 0xb5470917, rounds)
-
-    t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[ 0], 0x9216d5d9, rounds)
-    t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[23], 0x8979fb1b, rounds)
-    t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[20], 0xd1310ba6, rounds)
-    t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[22], 0x98dfb5ac, rounds)
-    t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[ 1], 0x2ffd72db, rounds)
-    t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[10], 0xd01adfb7, rounds)
-    t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[ 4], 0xb8e1afed, rounds)
-    t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[ 8], 0x6a267e96, rounds)
-
-    t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[30], 0xba7c9045, rounds)
-    t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[ 3], 0xf12c7f99, rounds)
-    t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0x24a19947, rounds)
-    t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[ 9], 0xb3916cf7, rounds)
-    t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x0801f2e2, rounds)
-    t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[24], 0x858efc16, rounds)
-    t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[29], 0x636920d8, rounds)
-    t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[ 6], 0x71574e69, rounds)
-
-    t7 = FF_2(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0xa458fea3, rounds)
-    t6 = FF_2(t6, t5, t4, t3, t2, t1, t0, t7, w[12], 0xf4933d7e, rounds)
-    t5 = FF_2(t5, t4, t3, t2, t1, t0, t7, t6, w[15], 0x0d95748f, rounds)
-    t4 = FF_2(t4, t3, t2, t1, t0, t7, t6, t5, w[13], 0x728eb658, rounds)
-    t3 = FF_2(t3, t2, t1, t0, t7, t6, t5, t4, w[ 2], 0x718bcd58, rounds)
-    t2 = FF_2(t2, t1, t0, t7, t6, t5, t4, t3, w[25], 0x82154aee, rounds)
-    t1 = FF_2(t1, t0, t7, t6, t5, t4, t3, t2, w[31], 0x7b54a41d, rounds)
-    t0 = FF_2(t0, t7, t6, t5, t4, t3, t2, t1, w[27], 0xc25a59b5, rounds)
-
-    t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0x9c30d539, rounds)
-    t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], 0x2af26013, rounds)
-    t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[ 4], 0xc5d1b023, rounds)
-    t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[20], 0x286085f0, rounds)
-    t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[28], 0xca417918, rounds)
-    t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[17], 0xb8db38ef, rounds)
-    t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[ 8], 0x8e79dcb0, rounds)
-    t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[22], 0x603a180e, rounds)
-
-    t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[29], 0x6c9e0e8b, rounds)
-    t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[14], 0xb01e8a3e, rounds)
-    t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[25], 0xd71577c1, rounds)
-    t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[12], 0xbd314b27, rounds)
-    t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[24], 0x78af2fda, rounds)
-    t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[30], 0x55605c60, rounds)
-    t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[16], 0xe65525f3, rounds)
-    t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[26], 0xaa55ab94, rounds)
-
-    t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[31], 0x57489862, rounds)
-    t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[15], 0x63e81440, rounds)
-    t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[ 7], 0x55ca396a, rounds)
-    t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[ 3], 0x2aab10b6, rounds)
-    t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[ 1], 0xb4cc5c34, rounds)
-    t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[ 0], 0x1141e8ce, rounds)
-    t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[18], 0xa15486af, rounds)
-    t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[27], 0x7c72e993, rounds)
-
-    t7 = FF_3(t7, t6, t5, t4, t3, t2, t1, t0, w[13], 0xb3ee1411, rounds)
-    t6 = FF_3(t6, t5, t4, t3, t2, t1, t0, t7, w[ 6], 0x636fbc2a, rounds)
-    t5 = FF_3(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0x2ba9c55d, rounds)
-    t4 = FF_3(t4, t3, t2, t1, t0, t7, t6, t5, w[10], 0x741831f6, rounds)
-    t3 = FF_3(t3, t2, t1, t0, t7, t6, t5, t4, w[23], 0xce5c3e16, rounds)
-    t2 = FF_3(t2, t1, t0, t7, t6, t5, t4, t3, w[11], 0x9b87931e, rounds)
-    t1 = FF_3(t1, t0, t7, t6, t5, t4, t3, t2, w[ 5], 0xafd6ba33, rounds)
-    t0 = FF_3(t0, t7, t6, t5, t4, t3, t2, t1, w[ 2], 0x6c24cf5c, rounds)
-
-    if rounds >= 4 {
-        t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[24], 0x7a325381, rounds)
-        t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[ 4], 0x28958677, rounds)
-        t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[ 0], 0x3b8f4898, rounds)
-        t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[14], 0x6b4bb9af, rounds)
-        t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[ 2], 0xc4bfe81b, rounds)
-        t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[ 7], 0x66282193, rounds)
-        t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[28], 0x61d809cc, rounds)
-        t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[23], 0xfb21a991, rounds)
-
-        t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[26], 0x487cac60, rounds)
-        t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[ 6], 0x5dec8032, rounds)
-        t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[30], 0xef845d5d, rounds)
-        t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[20], 0xe98575b1, rounds)
-        t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[18], 0xdc262302, rounds)
-        t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[25], 0xeb651b88, rounds)
-        t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[19], 0x23893e81, rounds)
-        t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[ 3], 0xd396acc5, rounds)
-
-        t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[22], 0x0f6d6ff3, rounds)
-        t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[11], 0x83f44239, rounds)
-        t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[31], 0x2e0b4482, rounds)
-        t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[21], 0xa4842004, rounds)
-        t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[ 8], 0x69c8f04a, rounds)
-        t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[27], 0x9e1f9b5e, rounds)
-        t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[12], 0x21c66842, rounds)
-        t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[ 9], 0xf6e96c9a, rounds)
-
-        t7 = FF_4(t7, t6, t5, t4, t3, t2, t1, t0, w[ 1], 0x670c9c61, rounds)
-        t6 = FF_4(t6, t5, t4, t3, t2, t1, t0, t7, w[29], 0xabd388f0, rounds)
-        t5 = FF_4(t5, t4, t3, t2, t1, t0, t7, t6, w[ 5], 0x6a51a0d2, rounds)
-        t4 = FF_4(t4, t3, t2, t1, t0, t7, t6, t5, w[15], 0xd8542f68, rounds)
-        t3 = FF_4(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x960fa728, rounds)
-        t2 = FF_4(t2, t1, t0, t7, t6, t5, t4, t3, w[10], 0xab5133a3, rounds)
-        t1 = FF_4(t1, t0, t7, t6, t5, t4, t3, t2, w[16], 0x6eef0b6c, rounds)
-        t0 = FF_4(t0, t7, t6, t5, t4, t3, t2, t1, w[13], 0x137a3be4, rounds)
-    }
-
-    if rounds == 5 {
-        t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[27], 0xba3bf050, rounds)
-        t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 3], 0x7efb2a98, rounds)
-        t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[21], 0xa1f1651d, rounds)
-        t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[26], 0x39af0176, rounds)
-        t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[17], 0x66ca593e, rounds)
-        t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[11], 0x82430e88, rounds)
-        t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[20], 0x8cee8619, rounds)
-        t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[29], 0x456f9fb4, rounds)
-
-        t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[19], 0x7d84a5c3, rounds)
-        t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 0], 0x3b8b5ebe, rounds)
-        t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[12], 0xe06f75d8, rounds)
-        t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[ 7], 0x85c12073, rounds)
-        t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[13], 0x401a449f, rounds)
-        t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 8], 0x56c16aa6, rounds)
-        t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[31], 0x4ed3aa62, rounds)
-        t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[10], 0x363f7706, rounds)
-
-        t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[ 5], 0x1bfedf72, rounds)
-        t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[ 9], 0x429b023d, rounds)
-        t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[14], 0x37d0d724, rounds)
-        t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[30], 0xd00a1248, rounds)
-        t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[18], 0xdb0fead3, rounds)
-        t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 6], 0x49f1c09b, rounds)
-        t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[28], 0x075372c9, rounds)
-        t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[24], 0x80991b7b, rounds)
-
-        t7 = FF_5(t7, t6, t5, t4, t3, t2, t1, t0, w[ 2], 0x25d479d8, rounds)
-        t6 = FF_5(t6, t5, t4, t3, t2, t1, t0, t7, w[23], 0xf6e8def7, rounds)
-        t5 = FF_5(t5, t4, t3, t2, t1, t0, t7, t6, w[16], 0xe3fe501a, rounds)
-        t4 = FF_5(t4, t3, t2, t1, t0, t7, t6, t5, w[22], 0xb6794c3b, rounds)
-        t3 = FF_5(t3, t2, t1, t0, t7, t6, t5, t4, w[ 4], 0x976ce0bd, rounds)
-        t2 = FF_5(t2, t1, t0, t7, t6, t5, t4, t3, w[ 1], 0x04c006ba, rounds)
-        t1 = FF_5(t1, t0, t7, t6, t5, t4, t3, t2, w[25], 0xc1a94fb6, rounds)
-        t0 = FF_5(t0, t7, t6, t5, t4, t3, t2, t1, w[15], 0x409f60c4, rounds)
-    }
-
-    ctx.fingerprint[0] += t0
-    ctx.fingerprint[1] += t1
-    ctx.fingerprint[2] += t2
-    ctx.fingerprint[3] += t3
-    ctx.fingerprint[4] += t4
-    ctx.fingerprint[5] += t5
-    ctx.fingerprint[6] += t6
-    ctx.fingerprint[7] += t7
-}
-
-tailor :: proc(ctx: ^Haval_Context, size: u32) {
-    temp: u32
-    switch size {
-        case 128:
-            temp = (ctx.fingerprint[7] & 0x000000ff) | 
-                   (ctx.fingerprint[6] & 0xff000000) | 
-                   (ctx.fingerprint[5] & 0x00ff0000) | 
-                   (ctx.fingerprint[4] & 0x0000ff00)
-            ctx.fingerprint[0] += util.ROTR32(temp, 8)
-
-            temp = (ctx.fingerprint[7] & 0x0000ff00) | 
-                   (ctx.fingerprint[6] & 0x000000ff) | 
-                   (ctx.fingerprint[5] & 0xff000000) | 
-                   (ctx.fingerprint[4] & 0x00ff0000)
-            ctx.fingerprint[1] += util.ROTR32(temp, 16)
-
-            temp = (ctx.fingerprint[7] & 0x00ff0000) | 
-                   (ctx.fingerprint[6] & 0x0000ff00) | 
-                   (ctx.fingerprint[5] & 0x000000ff) | 
-                   (ctx.fingerprint[4] & 0xff000000)
-            ctx.fingerprint[2] += util.ROTR32(temp, 24)
-
-            temp = (ctx.fingerprint[7] & 0xff000000) | 
-                   (ctx.fingerprint[6] & 0x00ff0000) | 
-                   (ctx.fingerprint[5] & 0x0000ff00) | 
-                   (ctx.fingerprint[4] & 0x000000ff)
-            ctx.fingerprint[3] += temp
-        case 160:
-            temp = (ctx.fingerprint[7] & u32(0x3f)) | 
-                   (ctx.fingerprint[6] & u32(0x7f << 25)) |  
-                   (ctx.fingerprint[5] & u32(0x3f << 19))
-            ctx.fingerprint[0] += util.ROTR32(temp, 19)
-
-            temp = (ctx.fingerprint[7] & u32(0x3f <<  6)) | 
-                   (ctx.fingerprint[6] & u32(0x3f)) |  
-                   (ctx.fingerprint[5] & u32(0x7f << 25))
-            ctx.fingerprint[1] += util.ROTR32(temp, 25)
-
-            temp = (ctx.fingerprint[7] & u32(0x7f << 12)) | 
-                   (ctx.fingerprint[6] & u32(0x3f <<  6)) |  
-                   (ctx.fingerprint[5] & u32(0x3f))
-            ctx.fingerprint[2] += temp
-
-            temp = (ctx.fingerprint[7] & u32(0x3f << 19)) | 
-                   (ctx.fingerprint[6] & u32(0x7f << 12)) |  
-                   (ctx.fingerprint[5] & u32(0x3f <<  6))
-            ctx.fingerprint[3] += temp >> 6
-
-            temp = (ctx.fingerprint[7] & u32(0x7f << 25)) | 
-                   (ctx.fingerprint[6] & u32(0x3f << 19)) |  
-                   (ctx.fingerprint[5] & u32(0x7f << 12))
-            ctx.fingerprint[4] += temp >> 12
-        case 192:
-            temp = (ctx.fingerprint[7] & u32(0x1f)) | 
-                   (ctx.fingerprint[6] & u32(0x3f << 26))
-            ctx.fingerprint[0] += util.ROTR32(temp, 26)
-
-            temp = (ctx.fingerprint[7] & u32(0x1f <<  5)) | 
-                   (ctx.fingerprint[6] & u32(0x1f))
-            ctx.fingerprint[1] += temp
-
-            temp = (ctx.fingerprint[7] & u32(0x3f << 10)) | 
-                   (ctx.fingerprint[6] & u32(0x1f <<  5))
-            ctx.fingerprint[2] += temp >> 5
-
-            temp = (ctx.fingerprint[7] & u32(0x1f << 16)) | 
-                   (ctx.fingerprint[6] & u32(0x3f << 10))
-            ctx.fingerprint[3] += temp >> 10
-
-            temp = (ctx.fingerprint[7] & u32(0x1f << 21)) | 
-                   (ctx.fingerprint[6] & u32(0x1f << 16))
-            ctx.fingerprint[4] += temp >> 16
-
-            temp = (ctx.fingerprint[7] & u32(0x3f << 26)) | 
-                   (ctx.fingerprint[6] & u32(0x1f << 21))
-            ctx.fingerprint[5] += temp >> 21
-        case 224:
-            ctx.fingerprint[0] += (ctx.fingerprint[7] >> 27) & 0x1f
-            ctx.fingerprint[1] += (ctx.fingerprint[7] >> 22) & 0x1f
-            ctx.fingerprint[2] += (ctx.fingerprint[7] >> 18) & 0x0f
-            ctx.fingerprint[3] += (ctx.fingerprint[7] >> 13) & 0x1f
-            ctx.fingerprint[4] += (ctx.fingerprint[7] >>  9) & 0x0f
-            ctx.fingerprint[5] += (ctx.fingerprint[7] >>  4) & 0x1f
-            ctx.fingerprint[6] +=  ctx.fingerprint[7]        & 0x0f
-    }
-}

+ 0 - 584
core/crypto/jh/jh.odin

@@ -1,584 +0,0 @@
-package jh
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the JH hashing algorithm, as defined in <https://www3.ntu.edu.sg/home/wuhj/research/jh/index.html>
-*/
-
-import "core:os"
-import "core:io"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_224 :: 28
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_384 :: 48
-DIGEST_SIZE_512 :: 64
-
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224(transmute([]byte)(data))
-}
-
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: Jh_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 224
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-    hash_stream_224,
-    hash_file_224,
-    hash_bytes_224,
-    hash_string_224,
-    hash_bytes_to_buffer_224,
-    hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Jh_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 256
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-    return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: Jh_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 384
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-    if !load_at_once {
-        return hash_stream_384(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_384(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-    hash_stream_384,
-    hash_file_384,
-    hash_bytes_384,
-    hash_string_384,
-    hash_bytes_to_buffer_384,
-    hash_string_to_buffer_384,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: Jh_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Jh_Context
-    ctx.hashbitlen = 512
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Jh_Context) {
-    assert(ctx.hashbitlen == 224 || ctx.hashbitlen == 256 || ctx.hashbitlen == 384 || ctx.hashbitlen == 512, "hashbitlen must be set to 224, 256, 384 or 512")
-    ctx.H[1] = byte(ctx.hashbitlen)      & 0xff
-    ctx.H[0] = byte(ctx.hashbitlen >> 8) & 0xff
-    F8(ctx)
-}
-
-update :: proc(ctx: ^Jh_Context, data: []byte) {
-    databitlen     := u64(len(data)) * 8
-    ctx.databitlen += databitlen
-    i              := u64(0)
-
-    if (ctx.buffer_size > 0) && ((ctx.buffer_size + databitlen) < 512) {
-        if (databitlen & 7) == 0 {
-            copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)])
-        } else {
-            copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3) + 1])
-        } 
-        ctx.buffer_size += databitlen
-        databitlen = 0
-    }
-
-    if (ctx.buffer_size > 0 ) && ((ctx.buffer_size + databitlen) >= 512) {
-        copy(ctx.buffer[ctx.buffer_size >> 3:], data[:64 - (ctx.buffer_size >> 3)])
-        i      = 64 - (ctx.buffer_size >> 3)
-        databitlen = databitlen - (512 - ctx.buffer_size)
-        F8(ctx)
-        ctx.buffer_size = 0
-    }
-
-    for databitlen >= 512 {
-        copy(ctx.buffer[:], data[i:i + 64])
-        F8(ctx)
-        i += 64
-        databitlen -= 512
-    }
-
-    if databitlen > 0 {
-        if (databitlen & 7) == 0 {
-            copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3)])
-        } else {
-            copy(ctx.buffer[:], data[i:i + ((databitlen & 0x1ff) >> 3) + 1])
-        }
-        ctx.buffer_size = databitlen
-    }
-}
-
-final :: proc(ctx: ^Jh_Context, hash: []byte) {
-    if ctx.databitlen & 0x1ff == 0 {
-        for i := 0; i < 64; i += 1 {
-            ctx.buffer[i] = 0
-        }
-        ctx.buffer[0]  = 0x80
-        ctx.buffer[63] = byte(ctx.databitlen)       & 0xff
-        ctx.buffer[62] = byte(ctx.databitlen >> 8)  & 0xff
-        ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff
-        ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff
-        ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff
-        ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff
-        ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff
-        ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff
-        F8(ctx)
-    } else {
-        if ctx.buffer_size & 7 == 0 {
-            for i := (ctx.databitlen & 0x1ff) >> 3; i < 64; i += 1 {
-                ctx.buffer[i] = 0
-            }
-        } else {
-            for i := ((ctx.databitlen & 0x1ff) >> 3) + 1; i < 64; i += 1 {
-                ctx.buffer[i] = 0
-            }
-        }
-        ctx.buffer[(ctx.databitlen & 0x1ff) >> 3] |= 1 << (7 - (ctx.databitlen & 7))
-        F8(ctx)
-        for i := 0; i < 64; i += 1 {
-            ctx.buffer[i] = 0
-        }
-        ctx.buffer[63] = byte(ctx.databitlen)       & 0xff
-        ctx.buffer[62] = byte(ctx.databitlen >> 8)  & 0xff
-        ctx.buffer[61] = byte(ctx.databitlen >> 16) & 0xff
-        ctx.buffer[60] = byte(ctx.databitlen >> 24) & 0xff
-        ctx.buffer[59] = byte(ctx.databitlen >> 32) & 0xff
-        ctx.buffer[58] = byte(ctx.databitlen >> 40) & 0xff
-        ctx.buffer[57] = byte(ctx.databitlen >> 48) & 0xff
-        ctx.buffer[56] = byte(ctx.databitlen >> 56) & 0xff
-        F8(ctx)
-    }
-    switch ctx.hashbitlen {
-        case 224: copy(hash[:], ctx.H[100:128])
-        case 256: copy(hash[:], ctx.H[96:128])
-        case 384: copy(hash[:], ctx.H[80:128])
-        case 512: copy(hash[:], ctx.H[64:128])
-    }
-}
-
-/*
-    JH implementation
-*/
-
-ROUNDCONSTANT_ZERO := [64]byte {
-    0x6, 0xa, 0x0, 0x9, 0xe, 0x6, 0x6, 0x7,
-    0xf, 0x3, 0xb, 0xc, 0xc, 0x9, 0x0, 0x8,
-    0xb, 0x2, 0xf, 0xb, 0x1, 0x3, 0x6, 0x6,
-    0xe, 0xa, 0x9, 0x5, 0x7, 0xd, 0x3, 0xe,
-    0x3, 0xa, 0xd, 0xe, 0xc, 0x1, 0x7, 0x5,
-    0x1, 0x2, 0x7, 0x7, 0x5, 0x0, 0x9, 0x9,
-    0xd, 0xa, 0x2, 0xf, 0x5, 0x9, 0x0, 0xb,
-    0x0, 0x6, 0x6, 0x7, 0x3, 0x2, 0x2, 0xa,
-}
-
-SBOX := [2][16]byte {
-    {9, 0,  4, 11, 13, 12, 3, 15, 1,  10, 2, 6, 7,  5,  8,  14},
-    {3, 12, 6, 13, 5,  7,  1, 9,  15, 2,  0, 4, 11, 10, 14, 8},
-}
-
-Jh_Context :: struct {
-    hashbitlen:    int,
-    databitlen:    u64,
-    buffer_size:   u64,
-    H:             [128]byte,
-    A:             [256]byte,
-    roundconstant: [64]byte,
-    buffer:        [64]byte,
-}
-
-E8_finaldegroup :: proc(ctx: ^Jh_Context) {
-    t0,t1,t2,t3: byte
-    tem: [256]byte
-    for i := 0; i < 128; i += 1 {
-        tem[i]       = ctx.A[i << 1]
-        tem[i + 128] = ctx.A[(i << 1) + 1]
-    }
-    for i := 0; i < 128; i += 1 {
-        ctx.H[i] = 0
-    }
-    for i := 0; i < 256; i += 1 {
-        t0 = (tem[i] >> 3) & 1
-        t1 = (tem[i] >> 2) & 1
-        t2 = (tem[i] >> 1) & 1
-        t3 = (tem[i] >> 0) & 1
-
-        ctx.H[uint(i) >> 3]         |= t0 << (7 - (uint(i) & 7))
-        ctx.H[(uint(i) + 256) >> 3] |= t1 << (7 - (uint(i) & 7))
-        ctx.H[(uint(i) + 512) >> 3] |= t2 << (7 - (uint(i) & 7))
-        ctx.H[(uint(i) + 768) >> 3] |= t3 << (7 - (uint(i) & 7))
-    }
-}
-
-update_roundconstant :: proc(ctx: ^Jh_Context) {
-    tem: [64]byte
-    t: byte
-    for i := 0; i < 64; i += 1 {
-        tem[i] = SBOX[0][ctx.roundconstant[i]]
-    }
-    for i := 0; i < 64; i += 2 {
-        tem[i + 1] ~= ((tem[i]   << 1)   ~ (tem[i]   >> 3)   ~ ((tem[i]   >> 2) & 2))   & 0xf
-        tem[i]     ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf
-    }
-    for i := 0; i < 64; i += 4 {
-        t          = tem[i + 2]
-        tem[i + 2] = tem[i + 3]
-        tem[i + 3] = t
-    }
-    for i := 0; i < 32; i += 1 {
-        ctx.roundconstant[i]      = tem[i << 1]
-        ctx.roundconstant[i + 32] = tem[(i << 1) + 1]
-    }
-    for i := 32; i < 64; i += 2 {
-        t                        = ctx.roundconstant[i]
-        ctx.roundconstant[i]     = ctx.roundconstant[i + 1]
-        ctx.roundconstant[i + 1] = t
-    }
-}
-
-R8 :: proc(ctx: ^Jh_Context) {
-    t: byte
-    tem, roundconstant_expanded: [256]byte
-    for i := u32(0); i < 256; i += 1 {
-        roundconstant_expanded[i] = (ctx.roundconstant[i >> 2] >> (3 - (i & 3)) ) & 1
-    }
-    for i := 0; i < 256; i += 1 {
-        tem[i] = SBOX[roundconstant_expanded[i]][ctx.A[i]]
-    }
-    for i := 0; i < 256; i += 2 {
-        tem[i+1] ~= ((tem[i]   << 1)   ~ (tem[i]   >> 3)   ~ ((tem[i]   >> 2) & 2))   & 0xf
-        tem[i]   ~= ((tem[i + 1] << 1) ~ (tem[i + 1] >> 3) ~ ((tem[i + 1] >> 2) & 2)) & 0xf
-    }
-    for i := 0; i < 256; i += 4 {
-        t        = tem[i + 2]
-        tem[i+2] = tem[i + 3]
-        tem[i+3] = t
-    }
-    for i := 0; i < 128; i += 1 {
-        ctx.A[i]       = tem[i << 1]
-        ctx.A[i + 128] = tem[(i << 1) + 1]
-    }
-    for i := 128; i < 256; i += 2 {
-        t            = ctx.A[i]
-        ctx.A[i]     = ctx.A[i + 1]
-        ctx.A[i + 1] = t
-    }
-}
-
-E8_initialgroup :: proc(ctx: ^Jh_Context) {
-    t0, t1, t2, t3: byte
-    tem:            [256]byte
-    for i := u32(0); i < 256; i += 1 {
-        t0     = (ctx.H[i >> 3]   >> (7 - (i & 7)))       & 1
-        t1     = (ctx.H[(i + 256) >> 3] >> (7 - (i & 7))) & 1
-        t2     = (ctx.H[(i + 512) >> 3] >> (7 - (i & 7))) & 1
-        t3     = (ctx.H[(i + 768) >> 3] >> (7 - (i & 7))) & 1
-        tem[i] = (t0 << 3) | (t1 << 2) | (t2 << 1) | (t3 << 0)
-    }
-    for i := 0; i < 128; i += 1 {
-        ctx.A[i << 1]       = tem[i]
-        ctx.A[(i << 1) + 1] = tem[i + 128]
-    }
-}
-
-E8 :: proc(ctx: ^Jh_Context) {
-    for i := 0; i < 64; i += 1 {
-        ctx.roundconstant[i] = ROUNDCONSTANT_ZERO[i]
-    }
-    E8_initialgroup(ctx)
-    for i := 0; i < 42; i += 1 {
-        R8(ctx)
-        update_roundconstant(ctx)
-    }
-    E8_finaldegroup(ctx)
-}
-
-F8 :: proc(ctx: ^Jh_Context) {
-    for i := 0; i < 64; i += 1 {
-        ctx.H[i] ~= ctx.buffer[i]
-    }
-    E8(ctx)
-    for i := 0; i < 64; i += 1 {
-        ctx.H[i + 64] ~= ctx.buffer[i]
-    }
-}

+ 0 - 374
core/crypto/keccak/keccak.odin

@@ -1,374 +0,0 @@
-package keccak
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the Keccak hashing algorithm.
-    Keccak is kept separate from SHA-3 because NIST changed the padding when standardizing SHA-3, so the two algorithms produce different digests.
-*/
-
-import "core:os"
-import "core:io"
-
-import "../_sha3"
-
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_224 :: 28
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_384 :: 48
-DIGEST_SIZE_512 :: 64
-
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224(transmute([]byte)(data))
-}
-
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-    hash_stream_224,
-    hash_file_224,
-    hash_bytes_224,
-    hash_string_224,
-    hash_bytes_to_buffer_224,
-    hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-    return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-    if !load_at_once {
-        return hash_stream_384(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_384(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-    hash_stream_384,
-    hash_file_384,
-    hash_bytes_384,
-    hash_string_384,
-    hash_bytes_to_buffer_384,
-    hash_string_to_buffer_384,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    ctx.is_keccak = true
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-Keccak_Context :: _sha3.Sha3_Context
-
-init :: proc(ctx: ^_sha3.Sha3_Context) {
-    ctx.is_keccak = true
-    _sha3.init(ctx)
-}
-
-update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
-    _sha3.update(ctx, data)
-}
-
-final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
-    _sha3.final(ctx, hash)
-}

+ 10 - 0
core/crypto/legacy/README.md

@@ -0,0 +1,10 @@
+# crypto/legacy
+
+These algorithms are shipped solely for interoperability with
+legacy systems.  Using these packages in any other capacity is
+discouraged, especially for the algorithms that are known to be
+broken.
+
+- Keccak - The draft version of the algorithm that became SHA-3
+- MD5 - Broken (https://eprint.iacr.org/2005/075)
+- SHA-1 - Broken (https://eprint.iacr.org/2017/190)

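As a migration sketch for the relocation documented above (assuming a caller that previously imported the top-level md5 package; the legacy md5 package and its hash_string helper appear further down in this diff), only the import path changes:

package example

import "core:fmt"
import "core:crypto/legacy/md5" // previously "core:crypto/md5"

print_md5 :: proc(msg: string) {
	digest := md5.hash_string(msg) // returns the fixed-size digest by value
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}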
+ 377 - 0
core/crypto/legacy/keccak/keccak.odin

@@ -0,0 +1,377 @@
+package keccak
+
+/*
+    Copyright 2021 zhibog
+    Made available under the BSD-3 license.
+
+    List of contributors:
+        zhibog, dotbmp:  Initial implementation.
+
+    Interface for the Keccak hashing algorithm.
+    Keccak is kept separate from SHA-3 because NIST changed the padding when standardizing SHA-3, so the two algorithms produce different digests.
+*/
+
+import "core:io"
+import "core:os"
+
+import "../../_sha3"
+
+/*
+    High level API
+*/
+
+DIGEST_SIZE_224 :: 28
+DIGEST_SIZE_256 :: 32
+DIGEST_SIZE_384 :: 48
+DIGEST_SIZE_512 :: 64
+
+// hash_string_224 will hash the given input and return the
+// computed hash
+hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
+	return hash_bytes_224(transmute([]byte)(data))
+}
+
+// hash_bytes_224 will hash the given input and return the
+// computed hash
+hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
+	hash: [DIGEST_SIZE_224]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer_224 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_224 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream_224 will read the stream in chunks and compute a
+// hash from its contents
+hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
+	hash: [DIGEST_SIZE_224]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	ctx.is_keccak = true
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file_224 will read the file provided by the given handle
+// and compute a hash
+hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
+	if !load_at_once {
+		return hash_stream_224(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_224(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_224]byte{}, false
+}
+
+hash_224 :: proc {
+	hash_stream_224,
+	hash_file_224,
+	hash_bytes_224,
+	hash_string_224,
+	hash_bytes_to_buffer_224,
+	hash_string_to_buffer_224,
+}
+
+// hash_string_256 will hash the given input and return the
+// computed hash
+hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
+	return hash_bytes_256(transmute([]byte)(data))
+}
+
+// hash_bytes_256 will hash the given input and return the
+// computed hash
+hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer_256 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_256 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream_256 will read the stream in chunks and compute a
+// hash from its contents
+hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	ctx.is_keccak = true
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file_256 will read the file provided by the given handle
+// and compute a hash
+hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
+	if !load_at_once {
+		return hash_stream_256(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_256(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_256]byte{}, false
+}
+
+hash_256 :: proc {
+	hash_stream_256,
+	hash_file_256,
+	hash_bytes_256,
+	hash_string_256,
+	hash_bytes_to_buffer_256,
+	hash_string_to_buffer_256,
+}
+
+// hash_string_384 will hash the given input and return the
+// computed hash
+hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
+	return hash_bytes_384(transmute([]byte)(data))
+}
+
+// hash_bytes_384 will hash the given input and return the
+// computed hash
+hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
+	hash: [DIGEST_SIZE_384]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer_384 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_384 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream_384 will read the stream in chunks and compute a
+// hash from its contents
+hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
+	hash: [DIGEST_SIZE_384]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	ctx.is_keccak = true
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file_384 will read the file provided by the given handle
+// and compute a hash
+hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
+	if !load_at_once {
+		return hash_stream_384(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_384(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_384]byte{}, false
+}
+
+hash_384 :: proc {
+	hash_stream_384,
+	hash_file_384,
+	hash_bytes_384,
+	hash_string_384,
+	hash_bytes_to_buffer_384,
+	hash_string_to_buffer_384,
+}
+
+// hash_string_512 will hash the given input and return the
+// computed hash
+hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
+	return hash_bytes_512(transmute([]byte)(data))
+}
+
+// hash_bytes_512 will hash the given input and return the
+// computed hash
+hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
+	hash: [DIGEST_SIZE_512]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer_512 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_512 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	ctx.is_keccak = true
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream_512 will read the stream in chunks and compute a
+// hash from its contents
+hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
+	hash: [DIGEST_SIZE_512]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	ctx.is_keccak = true
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file_512 will read the file provided by the given handle
+// and compute a hash
+hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
+	if !load_at_once {
+		return hash_stream_512(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_512(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_512]byte{}, false
+}
+
+hash_512 :: proc {
+	hash_stream_512,
+	hash_file_512,
+	hash_bytes_512,
+	hash_string_512,
+	hash_bytes_to_buffer_512,
+	hash_string_to_buffer_512,
+}
+
+/*
+    Low level API
+*/
+
+Context :: _sha3.Sha3_Context
+
+init :: proc(ctx: ^Context) {
+	ctx.is_keccak = true
+	_sha3.init(ctx)
+}
+
+update :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(ctx, data)
+}
+
+final :: proc(ctx: ^Context, hash: []byte) {
+	_sha3.final(ctx, hash)
+}

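A minimal sketch of the low-level API above, wrapping the three calls into one hypothetical helper; mdlen selects the digest size and must be set before init, exactly as the high-level wrappers above do:

package example

import "core:crypto/legacy/keccak"

keccak_256 :: proc(data: []byte) -> [keccak.DIGEST_SIZE_256]byte {
	ctx: keccak.Context
	ctx.mdlen = keccak.DIGEST_SIZE_256 // select the 256-bit digest before init
	keccak.init(&ctx)                  // init forces is_keccak, selecting Keccak rather than SHA-3 padding
	keccak.update(&ctx, data)
	digest: [keccak.DIGEST_SIZE_256]byte
	keccak.final(&ctx, digest[:])
	return digest
}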
+ 295 - 0
core/crypto/legacy/md5/md5.odin

@@ -0,0 +1,295 @@
+package md5
+
+/*
+    Copyright 2021 zhibog
+    Made available under the BSD-3 license.
+
+    List of contributors:
+        zhibog, dotbmp:  Initial implementation.
+
+    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
+*/
+
+import "core:encoding/endian"
+import "core:io"
+import "core:math/bits"
+import "core:mem"
+import "core:os"
+
+/*
+    High level API
+*/
+
+DIGEST_SIZE :: 16
+
+// hash_string will hash the given input and return the
+// computed hash
+hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
+	return hash_bytes(transmute([]byte)(data))
+}
+
+// hash_bytes will hash the given input and return the
+// computed hash
+hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer :: proc(data, hash: []byte) {
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream will read the stream in chunks and compute a
+// hash from its contents
+hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file will read the file provided by the given handle
+// and compute a hash
+hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
+	if !load_at_once {
+		return hash_stream(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE]byte{}, false
+}
+
+hash :: proc {
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
+}
+
+/*
+    Low level API
+*/
+
+init :: proc(ctx: ^Context) {
+	ctx.state[0] = 0x67452301
+	ctx.state[1] = 0xefcdab89
+	ctx.state[2] = 0x98badcfe
+	ctx.state[3] = 0x10325476
+
+	ctx.bitlen = 0
+	ctx.datalen = 0
+
+	ctx.is_initialized = true
+}
+
+update :: proc(ctx: ^Context, data: []byte) {
+	assert(ctx.is_initialized)
+
+	for i := 0; i < len(data); i += 1 {
+		ctx.data[ctx.datalen] = data[i]
+		ctx.datalen += 1
+		if (ctx.datalen == BLOCK_SIZE) {
+			transform(ctx, ctx.data[:])
+			ctx.bitlen += 512
+			ctx.datalen = 0
+		}
+	}
+}
+
+final :: proc(ctx: ^Context, hash: []byte) {
+	assert(ctx.is_initialized)
+
+	if len(hash) < DIGEST_SIZE {
+		panic("crypto/md5: invalid destination digest size")
+	}
+
+	i := ctx.datalen
+
+	if ctx.datalen < 56 {
+		ctx.data[i] = 0x80
+		i += 1
+		for i < 56 {
+			ctx.data[i] = 0x00
+			i += 1
+		}
+	} else {
+		ctx.data[i] = 0x80
+		i += 1
+		for i < BLOCK_SIZE {
+			ctx.data[i] = 0x00
+			i += 1
+		}
+		transform(ctx, ctx.data[:])
+		mem.set(&ctx.data, 0, 56)
+	}
+
+	ctx.bitlen += u64(ctx.datalen * 8)
+	endian.unchecked_put_u64le(ctx.data[56:], ctx.bitlen)
+	transform(ctx, ctx.data[:])
+
+	for i = 0; i < DIGEST_SIZE / 4; i += 1 {
+		endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i])
+	}
+
+	ctx.is_initialized = false
+}
+
+/*
+    MD5 implementation
+*/
+
+BLOCK_SIZE :: 64
+
+Context :: struct {
+	data:    [BLOCK_SIZE]byte,
+	state:   [4]u32,
+	bitlen:  u64,
+	datalen: u32,
+
+	is_initialized: bool,
+}
+
+/*
+    @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
+    and II respectively, instead of declaring them separately.
+*/
+
+@(private)
+FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
+	return b + bits.rotate_left32(a + ((b & c) | (~b & d)) + m + t, s)
+}
+
+@(private)
+GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
+	return b + bits.rotate_left32(a + ((b & d) | (c & ~d)) + m + t, s)
+}
+
+@(private)
+HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
+	return b + bits.rotate_left32(a + (b ~ c ~ d) + m + t, s)
+}
+
+@(private)
+II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
+	return b + bits.rotate_left32(a + (c ~ (b | ~d)) + m + t, s)
+}
+
+@(private)
+transform :: proc "contextless" (ctx: ^Context, data: []byte) {
+	m: [DIGEST_SIZE]u32
+
+	for i := 0; i < DIGEST_SIZE; i += 1 {
+		m[i] = endian.unchecked_get_u32le(data[i * 4:])
+	}
+
+	a := ctx.state[0]
+	b := ctx.state[1]
+	c := ctx.state[2]
+	d := ctx.state[3]
+
+	a = FF(a, b, c, d, m[0], 7, 0xd76aa478)
+	d = FF(d, a, b, c, m[1], 12, 0xe8c7b756)
+	c = FF(c, d, a, b, m[2], 17, 0x242070db)
+	b = FF(b, c, d, a, m[3], 22, 0xc1bdceee)
+	a = FF(a, b, c, d, m[4], 7, 0xf57c0faf)
+	d = FF(d, a, b, c, m[5], 12, 0x4787c62a)
+	c = FF(c, d, a, b, m[6], 17, 0xa8304613)
+	b = FF(b, c, d, a, m[7], 22, 0xfd469501)
+	a = FF(a, b, c, d, m[8], 7, 0x698098d8)
+	d = FF(d, a, b, c, m[9], 12, 0x8b44f7af)
+	c = FF(c, d, a, b, m[10], 17, 0xffff5bb1)
+	b = FF(b, c, d, a, m[11], 22, 0x895cd7be)
+	a = FF(a, b, c, d, m[12], 7, 0x6b901122)
+	d = FF(d, a, b, c, m[13], 12, 0xfd987193)
+	c = FF(c, d, a, b, m[14], 17, 0xa679438e)
+	b = FF(b, c, d, a, m[15], 22, 0x49b40821)
+
+	a = GG(a, b, c, d, m[1], 5, 0xf61e2562)
+	d = GG(d, a, b, c, m[6], 9, 0xc040b340)
+	c = GG(c, d, a, b, m[11], 14, 0x265e5a51)
+	b = GG(b, c, d, a, m[0], 20, 0xe9b6c7aa)
+	a = GG(a, b, c, d, m[5], 5, 0xd62f105d)
+	d = GG(d, a, b, c, m[10], 9, 0x02441453)
+	c = GG(c, d, a, b, m[15], 14, 0xd8a1e681)
+	b = GG(b, c, d, a, m[4], 20, 0xe7d3fbc8)
+	a = GG(a, b, c, d, m[9], 5, 0x21e1cde6)
+	d = GG(d, a, b, c, m[14], 9, 0xc33707d6)
+	c = GG(c, d, a, b, m[3], 14, 0xf4d50d87)
+	b = GG(b, c, d, a, m[8], 20, 0x455a14ed)
+	a = GG(a, b, c, d, m[13], 5, 0xa9e3e905)
+	d = GG(d, a, b, c, m[2], 9, 0xfcefa3f8)
+	c = GG(c, d, a, b, m[7], 14, 0x676f02d9)
+	b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a)
+
+	a = HH(a, b, c, d, m[5], 4, 0xfffa3942)
+	d = HH(d, a, b, c, m[8], 11, 0x8771f681)
+	c = HH(c, d, a, b, m[11], 16, 0x6d9d6122)
+	b = HH(b, c, d, a, m[14], 23, 0xfde5380c)
+	a = HH(a, b, c, d, m[1], 4, 0xa4beea44)
+	d = HH(d, a, b, c, m[4], 11, 0x4bdecfa9)
+	c = HH(c, d, a, b, m[7], 16, 0xf6bb4b60)
+	b = HH(b, c, d, a, m[10], 23, 0xbebfbc70)
+	a = HH(a, b, c, d, m[13], 4, 0x289b7ec6)
+	d = HH(d, a, b, c, m[0], 11, 0xeaa127fa)
+	c = HH(c, d, a, b, m[3], 16, 0xd4ef3085)
+	b = HH(b, c, d, a, m[6], 23, 0x04881d05)
+	a = HH(a, b, c, d, m[9], 4, 0xd9d4d039)
+	d = HH(d, a, b, c, m[12], 11, 0xe6db99e5)
+	c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8)
+	b = HH(b, c, d, a, m[2], 23, 0xc4ac5665)
+
+	a = II(a, b, c, d, m[0], 6, 0xf4292244)
+	d = II(d, a, b, c, m[7], 10, 0x432aff97)
+	c = II(c, d, a, b, m[14], 15, 0xab9423a7)
+	b = II(b, c, d, a, m[5], 21, 0xfc93a039)
+	a = II(a, b, c, d, m[12], 6, 0x655b59c3)
+	d = II(d, a, b, c, m[3], 10, 0x8f0ccc92)
+	c = II(c, d, a, b, m[10], 15, 0xffeff47d)
+	b = II(b, c, d, a, m[1], 21, 0x85845dd1)
+	a = II(a, b, c, d, m[8], 6, 0x6fa87e4f)
+	d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0)
+	c = II(c, d, a, b, m[6], 15, 0xa3014314)
+	b = II(b, c, d, a, m[13], 21, 0x4e0811a1)
+	a = II(a, b, c, d, m[4], 6, 0xf7537e82)
+	d = II(d, a, b, c, m[11], 10, 0xbd3af235)
+	c = II(c, d, a, b, m[2], 15, 0x2ad7d2bb)
+	b = II(b, c, d, a, m[9], 21, 0xeb86d391)
+
+	ctx.state[0] += a
+	ctx.state[1] += b
+	ctx.state[2] += c
+	ctx.state[3] += d
+}

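A short sketch of the incremental interface above, assuming input that arrives in chunks; update buffers bytes into 64-byte blocks, and final clears is_initialized, so the context cannot be reused without a fresh init:

package example

import "core:crypto/legacy/md5"

md5_chunked :: proc(chunks: [][]byte) -> [md5.DIGEST_SIZE]byte {
	ctx: md5.Context
	md5.init(&ctx)
	for chunk in chunks {
		md5.update(&ctx, chunk) // transform runs each time a 64-byte block fills
	}
	digest: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, digest[:]) // panics if the destination is smaller than DIGEST_SIZE
	return digest
}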
+ 252 - 0
core/crypto/legacy/sha1/sha1.odin

@@ -0,0 +1,252 @@
+package sha1
+
+/*
+    Copyright 2021 zhibog
+    Made available under the BSD-3 license.
+
+    List of contributors:
+        zhibog, dotbmp:  Initial implementation.
+
+    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
+*/
+
+import "core:encoding/endian"
+import "core:io"
+import "core:math/bits"
+import "core:mem"
+import "core:os"
+
+/*
+    High level API
+*/
+
+DIGEST_SIZE :: 20
+
+// hash_string will hash the given input and return the
+// computed hash
+hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
+	return hash_bytes(transmute([]byte)(data))
+}
+
+// hash_bytes will hash the given input and return the
+// computed hash
+hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
+}
+
+// hash_string_to_buffer will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer :: proc(data: string, hash: []byte) {
+	hash_bytes_to_buffer(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer :: proc(data, hash: []byte) {
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream will read the stream in chunks and compute a
+// hash from its contents
+hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
+}
+
+// hash_file will read the file provided by the given handle
+// and compute a hash
+hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
+	if !load_at_once {
+		return hash_stream(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE]byte{}, false
+}
+
+hash :: proc {
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
+}
+
+/*
+    Low level API
+*/
+
+init :: proc(ctx: ^Context) {
+	ctx.state[0] = 0x67452301
+	ctx.state[1] = 0xefcdab89
+	ctx.state[2] = 0x98badcfe
+	ctx.state[3] = 0x10325476
+	ctx.state[4] = 0xc3d2e1f0
+	ctx.k[0] = 0x5a827999
+	ctx.k[1] = 0x6ed9eba1
+	ctx.k[2] = 0x8f1bbcdc
+	ctx.k[3] = 0xca62c1d6
+
+	ctx.datalen = 0
+	ctx.bitlen = 0
+
+	ctx.is_initialized = true
+}
+
+update :: proc(ctx: ^Context, data: []byte) {
+	assert(ctx.is_initialized)
+
+	for i := 0; i < len(data); i += 1 {
+		ctx.data[ctx.datalen] = data[i]
+		ctx.datalen += 1
+		if (ctx.datalen == BLOCK_SIZE) {
+			transform(ctx, ctx.data[:])
+			ctx.bitlen += 512
+			ctx.datalen = 0
+		}
+	}
+}
+
+final :: proc(ctx: ^Context, hash: []byte) {
+	assert(ctx.is_initialized)
+
+	if len(hash) < DIGEST_SIZE {
+		panic("crypto/sha1: invalid destination digest size")
+	}
+
+	i := ctx.datalen
+
+	if ctx.datalen < 56 {
+		ctx.data[i] = 0x80
+		i += 1
+		for i < 56 {
+			ctx.data[i] = 0x00
+			i += 1
+		}
+	} else {
+		ctx.data[i] = 0x80
+		i += 1
+		for i < BLOCK_SIZE {
+			ctx.data[i] = 0x00
+			i += 1
+		}
+		transform(ctx, ctx.data[:])
+		mem.set(&ctx.data, 0, 56)
+	}
+
+	ctx.bitlen += u64(ctx.datalen * 8)
+	endian.unchecked_put_u64be(ctx.data[56:], ctx.bitlen)
+	transform(ctx, ctx.data[:])
+
+	for i = 0; i < DIGEST_SIZE / 4; i += 1 {
+		endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
+	}
+
+	ctx.is_initialized = false
+}
+
+/*
+    SHA1 implementation
+*/
+
+BLOCK_SIZE :: 64
+
+Context :: struct {
+	data:    [BLOCK_SIZE]byte,
+	datalen: u32,
+	bitlen:  u64,
+	state:   [5]u32,
+	k:       [4]u32,
+
+	is_initialized: bool,
+}
+
+@(private)
+transform :: proc "contextless" (ctx: ^Context, data: []byte) {
+	a, b, c, d, e, i, t: u32
+	m: [80]u32
+
+	for i = 0; i < 16; i += 1 {
+		m[i] = endian.unchecked_get_u32be(data[i * 4:])
+	}
+	for i < 80 {
+		m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16])
+		m[i] = (m[i] << 1) | (m[i] >> 31)
+		i += 1
+	}
+
+	a = ctx.state[0]
+	b = ctx.state[1]
+	c = ctx.state[2]
+	d = ctx.state[3]
+	e = ctx.state[4]
+
+	for i = 0; i < 20; i += 1 {
+		t = bits.rotate_left32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i]
+		e = d
+		d = c
+		c = bits.rotate_left32(b, 30)
+		b = a
+		a = t
+	}
+	for i < 40 {
+		t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i]
+		e = d
+		d = c
+		c = bits.rotate_left32(b, 30)
+		b = a
+		a = t
+		i += 1
+	}
+	for i < 60 {
+		t = bits.rotate_left32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i]
+		e = d
+		d = c
+		c = bits.rotate_left32(b, 30)
+		b = a
+		a = t
+		i += 1
+	}
+	for i < 80 {
+		t = bits.rotate_left32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i]
+		e = d
+		d = c
+		c = bits.rotate_left32(b, 30)
+		b = a
+		a = t
+		i += 1
+	}
+
+	ctx.state[0] += a
+	ctx.state[1] += b
+	ctx.state[2] += c
+	ctx.state[3] += d
+	ctx.state[4] += e
+}

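As a usage sketch for the file helpers above (the path parameter is hypothetical): hash_file streams via hash_stream by default, and only reads the entire file up front when load_at_once is set:

package example

import "core:fmt"
import "core:os"
import "core:crypto/legacy/sha1"

print_file_sha1 :: proc(path: string) {
	hd, err := os.open(path)
	if err != os.ERROR_NONE {
		return
	}
	defer os.close(hd)

	if digest, ok := sha1.hash_file(hd); ok {
		for b in digest {
			fmt.printf("%02x", b)
		}
		fmt.println()
	}
}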
+ 0 - 182
core/crypto/md2/md2.odin

@@ -1,182 +0,0 @@
-package md2
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the MD2 hashing algorithm, as defined in RFC 1319 <https://datatracker.ietf.org/doc/html/rfc1319>
-*/
-
-import "core:os"
-import "core:io"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 16
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Md2_Context
-    // init(&ctx) No-op
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Md2_Context
-    // init(&ctx) No-op
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Md2_Context
-	// init(&ctx) No-op
-	buf := make([]byte, 512)
-	defer delete(buf)
-	read := 1
-	for read > 0 {
-        read, _ = io.read(s, buf)
-	    if read > 0 {
-			update(&ctx, buf[:read])
-	    } 
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-@(warning="Init is a no-op for MD2")
-init :: proc(ctx: ^Md2_Context) {
-	// No action needed here
-}
-
-update :: proc(ctx: ^Md2_Context, data: []byte) {
-	for i := 0; i < len(data); i += 1 {
-		ctx.data[ctx.datalen] = data[i]
-		ctx.datalen += 1
-		if (ctx.datalen == DIGEST_SIZE) {
-			transform(ctx, ctx.data[:])
-			ctx.datalen = 0
-		}
-	}
-}
-
-final :: proc(ctx: ^Md2_Context, hash: []byte) {
-	to_pad := byte(DIGEST_SIZE - ctx.datalen)
-    for ctx.datalen < DIGEST_SIZE {
-        ctx.data[ctx.datalen] = to_pad
-		ctx.datalen += 1
-    }
-	transform(ctx, ctx.data[:])
-	transform(ctx, ctx.checksum[:])
-    for i := 0; i < DIGEST_SIZE; i += 1 {
-        hash[i] = ctx.state[i]
-    }
-}
-
-/*
-    MD2 implementation
-*/
-
-Md2_Context :: struct {
-    data:     [DIGEST_SIZE]byte,
-    state:    [DIGEST_SIZE * 3]byte,
-    checksum: [DIGEST_SIZE]byte,
-    datalen:  int,
-}
-
-PI_TABLE := [?]byte {
-	41,  46,  67,  201, 162, 216, 124, 1,   61,  54,  84,  161, 236, 240, 6,
-	19,  98,  167, 5,   243, 192, 199, 115, 140, 152, 147, 43,  217, 188, 76,
-	130, 202, 30,  155, 87,  60,  253, 212, 224, 22,  103, 66,  111, 24,  138, 
-	23,  229, 18,  190, 78,  196, 214, 218, 158, 222, 73,  160, 251, 245, 142,
-	187, 47,  238, 122, 169, 104, 121, 145, 21,  178, 7,   63,  148, 194, 16,
-	137, 11,  34,  95,  33,  128, 127, 93,  154, 90,  144, 50,  39,  53,  62, 
-	204, 231, 191, 247, 151, 3,   255, 25,  48,  179, 72,  165, 181, 209, 215,
-	94,  146, 42,  172, 86,  170, 198, 79,  184, 56,  210, 150, 164, 125, 182,
-	118, 252, 107, 226, 156, 116, 4,   241, 69,  157, 112, 89,  100, 113, 135,
-	32,  134, 91,  207, 101, 230, 45,  168, 2,   27,  96,  37,  173, 174, 176,
-	185, 246, 28,  70,  97,  105, 52,  64,  126, 15,  85,  71,  163, 35,  221,
-	81,  175, 58,  195, 92,  249, 206, 186, 197, 234, 38,  44,  83,  13,  110,
-	133, 40,  132, 9,   211, 223, 205, 244, 65,  129, 77,  82,  106, 220, 55,
-	200, 108, 193, 171, 250, 36,  225, 123, 8,   12,  189, 177, 74,  120, 136,
-	149, 139, 227, 99,  232, 109, 233, 203, 213, 254, 59,  0,   29,  57,  242,
-	239, 183, 14,  102, 88,  208, 228, 166, 119, 114, 248, 235, 117, 75,  10,
-	49,  68,  80,  180, 143, 237, 31,  26,  219, 153, 141, 51,  159, 17,  131,
-	20,
-}
-
-transform :: proc(ctx: ^Md2_Context, data: []byte) {
-	j, k, t: byte
-	for j = 0; j < DIGEST_SIZE; j += 1 {
-		ctx.state[j + DIGEST_SIZE] = data[j]
-		ctx.state[j + DIGEST_SIZE * 2] = (ctx.state[j + DIGEST_SIZE] ~ ctx.state[j])
-	}
-	t = 0
-	for j = 0; j < DIGEST_SIZE + 2; j += 1 {
-		for k = 0; k < DIGEST_SIZE * 3; k += 1 {
-			ctx.state[k] ~= PI_TABLE[t]
-			t = ctx.state[k]
-		}
-		t = (t + j) & 0xff
-	}
-	t = ctx.checksum[DIGEST_SIZE - 1]
-	for j = 0; j < DIGEST_SIZE; j += 1 {
-		ctx.checksum[j] ~= PI_TABLE[data[j] ~ t]
-		t = ctx.checksum[j]
-	}
-}
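
For reference, the padding in the deleted `final` above is MD2's PKCS#7-style rule: the 16-byte block is filled with N copies of the byte value N, after which the running checksum is processed as one extra block. A minimal standalone sketch of just the padding step (the proc name is illustrative, not from the package):

pad_md2_block :: proc(block: ^[16]byte, used: int) {
	// Each remaining byte holds the pad length itself (16 - used).
	to_pad := byte(16 - used)
	for i := used; i < 16; i += 1 {
		block[i] = to_pad
	}
}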

+ 0 - 263
core/crypto/md4/md4.odin

@@ -1,263 +0,0 @@
-package md4
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-        Jeroen van Rijn: Context design to be able to change from Odin implementation to bindings.
-
-    Implementation of the MD4 hashing algorithm, as defined in RFC 1320 <https://datatracker.ietf.org/doc/html/rfc1320>
-*/
-
-import "core:mem"
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 16
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: Md4_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Md4_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: Md4_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Md4_Context) {
-    ctx.state[0] = 0x67452301
-    ctx.state[1] = 0xefcdab89
-    ctx.state[2] = 0x98badcfe
-    ctx.state[3] = 0x10325476
-}
-
-update :: proc(ctx: ^Md4_Context, data: []byte) {
-    for i := 0; i < len(data); i += 1 {
-        ctx.data[ctx.datalen] = data[i]
-        ctx.datalen += 1
-        if ctx.datalen == BLOCK_SIZE {
-            transform(ctx, ctx.data[:])
-            ctx.bitlen += 512
-            ctx.datalen = 0
-        }
-    }
-}
-
-final :: proc(ctx: ^Md4_Context, hash: []byte) {
-    i := ctx.datalen
-    if ctx.datalen < 56 {
-        ctx.data[i] = 0x80
-        i += 1
-        for i < 56 {
-            ctx.data[i] = 0x00
-            i += 1
-        }
-    } else if ctx.datalen >= 56 {
-        ctx.data[i] = 0x80
-        i += 1
-        for i < BLOCK_SIZE {
-            ctx.data[i] = 0x00
-            i += 1
-        }
-        transform(ctx, ctx.data[:])
-        mem.set(&ctx.data, 0, 56)
-    }
-
-    ctx.bitlen  += u64(ctx.datalen * 8)
-    ctx.data[56] = byte(ctx.bitlen)
-    ctx.data[57] = byte(ctx.bitlen >> 8)
-    ctx.data[58] = byte(ctx.bitlen >> 16)
-    ctx.data[59] = byte(ctx.bitlen >> 24)
-    ctx.data[60] = byte(ctx.bitlen >> 32)
-    ctx.data[61] = byte(ctx.bitlen >> 40)
-    ctx.data[62] = byte(ctx.bitlen >> 48)
-    ctx.data[63] = byte(ctx.bitlen >> 56)
-    transform(ctx, ctx.data[:])
-
-    for i = 0; i < 4; i += 1 {
-        hash[i]      = byte(ctx.state[0] >> (i * 8)) & 0x000000ff
-        hash[i + 4]  = byte(ctx.state[1] >> (i * 8)) & 0x000000ff
-        hash[i + 8]  = byte(ctx.state[2] >> (i * 8)) & 0x000000ff
-        hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff
-    }
-}
-
-/*
-    MD4 implementation
-*/
-
-BLOCK_SIZE  :: 64
-
-Md4_Context :: struct {
-    data:    [64]byte,
-    state:   [4]u32,
-    bitlen:  u64,
-    datalen: u32,
-}
-
-/*
-    @note(zh): F, G and H, as mentioned in the RFC, have been inlined into FF, GG 
-    and HH respectively, instead of declaring them separately.
-*/
-
-FF :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 {
-    return util.ROTL32(a + ((b & c) | (~b & d)) + x, s)
-}
-
-GG :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 {
-    return util.ROTL32(a + ((b & c) | (b & d) | (c & d)) + x + 0x5a827999, s)
-}
-
-HH :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 {
-    return util.ROTL32(a + (b ~ c ~ d) + x + 0x6ed9eba1, s)
-}
-
-transform :: proc(ctx: ^Md4_Context, data: []byte) {
-    a, b, c, d, i, j: u32
-    m: [DIGEST_SIZE]u32
-
-    for i, j = 0, 0; i < DIGEST_SIZE; i += 1 {
-        m[i] = u32(data[j]) | (u32(data[j + 1]) << 8) | (u32(data[j + 2]) << 16) | (u32(data[j + 3]) << 24)
-        j += 4
-    }
-
-    a = ctx.state[0]
-    b = ctx.state[1]
-    c = ctx.state[2]
-    d = ctx.state[3]
-
-    a = FF(a, b, c, d, m[0],  3)
-    d = FF(d, a, b, c, m[1],  7)
-    c = FF(c, d, a, b, m[2],  11)
-    b = FF(b, c, d, a, m[3],  19)
-    a = FF(a, b, c, d, m[4],  3)
-    d = FF(d, a, b, c, m[5],  7)
-    c = FF(c, d, a, b, m[6],  11)
-    b = FF(b, c, d, a, m[7],  19)
-    a = FF(a, b, c, d, m[8],  3)
-    d = FF(d, a, b, c, m[9],  7)
-    c = FF(c, d, a, b, m[10], 11)
-    b = FF(b, c, d, a, m[11], 19)
-    a = FF(a, b, c, d, m[12], 3)
-    d = FF(d, a, b, c, m[13], 7)
-    c = FF(c, d, a, b, m[14], 11)
-    b = FF(b, c, d, a, m[15], 19)
-
-    a = GG(a, b, c, d, m[0],  3)
-    d = GG(d, a, b, c, m[4],  5)
-    c = GG(c, d, a, b, m[8],  9)
-    b = GG(b, c, d, a, m[12], 13)
-    a = GG(a, b, c, d, m[1],  3)
-    d = GG(d, a, b, c, m[5],  5)
-    c = GG(c, d, a, b, m[9],  9)
-    b = GG(b, c, d, a, m[13], 13)
-    a = GG(a, b, c, d, m[2],  3)
-    d = GG(d, a, b, c, m[6],  5)
-    c = GG(c, d, a, b, m[10], 9)
-    b = GG(b, c, d, a, m[14], 13)
-    a = GG(a, b, c, d, m[3],  3)
-    d = GG(d, a, b, c, m[7],  5)
-    c = GG(c, d, a, b, m[11], 9)
-    b = GG(b, c, d, a, m[15], 13)
-
-    a = HH(a, b, c, d, m[0],  3)
-    d = HH(d, a, b, c, m[8],  9)
-    c = HH(c, d, a, b, m[4],  11)
-    b = HH(b, c, d, a, m[12], 15)
-    a = HH(a, b, c, d, m[2],  3)
-    d = HH(d, a, b, c, m[10], 9)
-    c = HH(c, d, a, b, m[6],  11)
-    b = HH(b, c, d, a, m[14], 15)
-    a = HH(a, b, c, d, m[1],  3)
-    d = HH(d, a, b, c, m[9],  9)
-    c = HH(c, d, a, b, m[5],  11)
-    b = HH(b, c, d, a, m[13], 15)
-    a = HH(a, b, c, d, m[3],  3)
-    d = HH(d, a, b, c, m[11], 9)
-    c = HH(c, d, a, b, m[7],  11)
-    b = HH(b, c, d, a, m[15], 15)
-
-    ctx.state[0] += a
-    ctx.state[1] += b
-    ctx.state[2] += c
-    ctx.state[3] += d
-}
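
For reference, the low-level API deleted above composed like this when input arrives in pieces (a minimal sketch; the import path matches the deleted file's location, the helper name is illustrative):

package examples

import "core:crypto/md4"

md4_incremental :: proc(parts: [][]byte) -> [md4.DIGEST_SIZE]byte {
	hash: [md4.DIGEST_SIZE]byte
	ctx: md4.Md4_Context
	md4.init(&ctx)
	for part in parts {
		md4.update(&ctx, part)
	}
	md4.final(&ctx, hash[:])
	return hash
}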

+ 0 - 285
core/crypto/md5/md5.odin

@@ -1,285 +0,0 @@
-package md5
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
-*/
-
-import "core:mem"
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 16
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: Md5_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Md5_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: Md5_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Md5_Context) {
-    ctx.state[0] = 0x67452301
-    ctx.state[1] = 0xefcdab89
-    ctx.state[2] = 0x98badcfe
-    ctx.state[3] = 0x10325476
-}
-
-update :: proc(ctx: ^Md5_Context, data: []byte) {
-    for i := 0; i < len(data); i += 1 {
-        ctx.data[ctx.datalen] = data[i]
-        ctx.datalen += 1
-        if ctx.datalen == BLOCK_SIZE {
-            transform(ctx, ctx.data[:])
-            ctx.bitlen += 512
-            ctx.datalen = 0
-        }
-    }
-}
-
-final :: proc(ctx: ^Md5_Context, hash: []byte) {
-    i := ctx.datalen
-
-    if ctx.datalen < 56 {
-        ctx.data[i] = 0x80
-        i += 1
-        for i < 56 {
-            ctx.data[i] = 0x00
-            i += 1
-        }
-    } else if ctx.datalen >= 56 {
-        ctx.data[i] = 0x80
-        i += 1
-        for i < BLOCK_SIZE {
-            ctx.data[i] = 0x00
-            i += 1
-        }
-        transform(ctx, ctx.data[:])
-        mem.set(&ctx.data, 0, 56)
-    }
-
-    ctx.bitlen  += u64(ctx.datalen * 8)
-    ctx.data[56] = byte(ctx.bitlen)
-    ctx.data[57] = byte(ctx.bitlen >> 8)
-    ctx.data[58] = byte(ctx.bitlen >> 16)
-    ctx.data[59] = byte(ctx.bitlen >> 24)
-    ctx.data[60] = byte(ctx.bitlen >> 32)
-    ctx.data[61] = byte(ctx.bitlen >> 40)
-    ctx.data[62] = byte(ctx.bitlen >> 48)
-    ctx.data[63] = byte(ctx.bitlen >> 56)
-    transform(ctx, ctx.data[:])
-
-    for i = 0; i < 4; i += 1 {
-        hash[i]      = byte(ctx.state[0] >> (i * 8)) & 0x000000ff
-        hash[i + 4]  = byte(ctx.state[1] >> (i * 8)) & 0x000000ff
-        hash[i + 8]  = byte(ctx.state[2] >> (i * 8)) & 0x000000ff
-        hash[i + 12] = byte(ctx.state[3] >> (i * 8)) & 0x000000ff
-    }
-}
-
-/*
-    MD5 implementation
-*/
-
-BLOCK_SIZE  :: 64
-
-Md5_Context :: struct {
-    data:    [BLOCK_SIZE]byte,
-    state:   [4]u32,
-    bitlen:  u64,
-    datalen: u32,
-}
-
-/*
-    @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH 
-    and II respectively, instead of declaring them separately.
-*/
-
-FF :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
-    return b + util.ROTL32(a + ((b & c) | (~b & d)) + m + t, s)
-}
-
-GG :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
-    return b + util.ROTL32(a + ((b & d) | (c & ~d)) + m + t, s)
-}
-
-HH :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
-    return b + util.ROTL32(a + (b ~ c ~ d) + m + t, s)
-}
-
-II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u32 {
-    return b + util.ROTL32(a + (c ~ (b | ~d)) + m + t, s)
-}
-
-transform :: proc(ctx: ^Md5_Context, data: []byte) {
-    i, j: u32
-    m: [DIGEST_SIZE]u32
-
-    for i, j = 0, 0; i < DIGEST_SIZE; i += 1 {
-        m[i] = u32(data[j]) | (u32(data[j + 1]) << 8) | (u32(data[j + 2]) << 16) | (u32(data[j + 3]) << 24)
-        j += 4
-    }
-
-    a := ctx.state[0]
-    b := ctx.state[1]
-    c := ctx.state[2]
-    d := ctx.state[3]
-
-    a = FF(a, b, c, d, m[0],   7, 0xd76aa478)
-    d = FF(d, a, b, c, m[1],  12, 0xe8c7b756)
-    c = FF(c, d, a, b, m[2],  17, 0x242070db)
-    b = FF(b, c, d, a, m[3],  22, 0xc1bdceee)
-    a = FF(a, b, c, d, m[4],   7, 0xf57c0faf)
-    d = FF(d, a, b, c, m[5],  12, 0x4787c62a)
-    c = FF(c, d, a, b, m[6],  17, 0xa8304613)
-    b = FF(b, c, d, a, m[7],  22, 0xfd469501)
-    a = FF(a, b, c, d, m[8],   7, 0x698098d8)
-    d = FF(d, a, b, c, m[9],  12, 0x8b44f7af)
-    c = FF(c, d, a, b, m[10], 17, 0xffff5bb1)
-    b = FF(b, c, d, a, m[11], 22, 0x895cd7be)
-    a = FF(a, b, c, d, m[12],  7, 0x6b901122)
-    d = FF(d, a, b, c, m[13], 12, 0xfd987193)
-    c = FF(c, d, a, b, m[14], 17, 0xa679438e)
-    b = FF(b, c, d, a, m[15], 22, 0x49b40821)
-
-    a = GG(a, b, c, d, m[1],   5, 0xf61e2562)
-    d = GG(d, a, b, c, m[6],   9, 0xc040b340)
-    c = GG(c, d, a, b, m[11], 14, 0x265e5a51)
-    b = GG(b, c, d, a, m[0],  20, 0xe9b6c7aa)
-    a = GG(a, b, c, d, m[5],   5, 0xd62f105d)
-    d = GG(d, a, b, c, m[10],  9, 0x02441453)
-    c = GG(c, d, a, b, m[15], 14, 0xd8a1e681)
-    b = GG(b, c, d, a, m[4],  20, 0xe7d3fbc8)
-    a = GG(a, b, c, d, m[9],   5, 0x21e1cde6)
-    d = GG(d, a, b, c, m[14],  9, 0xc33707d6)
-    c = GG(c, d, a, b, m[3],  14, 0xf4d50d87)
-    b = GG(b, c, d, a, m[8],  20, 0x455a14ed)
-    a = GG(a, b, c, d, m[13],  5, 0xa9e3e905)
-    d = GG(d, a, b, c, m[2],   9, 0xfcefa3f8)
-    c = GG(c, d, a, b, m[7],  14, 0x676f02d9)
-    b = GG(b, c, d, a, m[12], 20, 0x8d2a4c8a)
-
-    a = HH(a, b, c, d, m[5],   4, 0xfffa3942)
-    d = HH(d, a, b, c, m[8],  11, 0x8771f681)
-    c = HH(c, d, a, b, m[11], 16, 0x6d9d6122)
-    b = HH(b, c, d, a, m[14], 23, 0xfde5380c)
-    a = HH(a, b, c, d, m[1],   4, 0xa4beea44)
-    d = HH(d, a, b, c, m[4],  11, 0x4bdecfa9)
-    c = HH(c, d, a, b, m[7],  16, 0xf6bb4b60)
-    b = HH(b, c, d, a, m[10], 23, 0xbebfbc70)
-    a = HH(a, b, c, d, m[13],  4, 0x289b7ec6)
-    d = HH(d, a, b, c, m[0],  11, 0xeaa127fa)
-    c = HH(c, d, a, b, m[3],  16, 0xd4ef3085)
-    b = HH(b, c, d, a, m[6],  23, 0x04881d05)
-    a = HH(a, b, c, d, m[9],   4, 0xd9d4d039)
-    d = HH(d, a, b, c, m[12], 11, 0xe6db99e5)
-    c = HH(c, d, a, b, m[15], 16, 0x1fa27cf8)
-    b = HH(b, c, d, a, m[2],  23, 0xc4ac5665)
-
-    a = II(a, b, c, d, m[0],   6, 0xf4292244)
-    d = II(d, a, b, c, m[7],  10, 0x432aff97)
-    c = II(c, d, a, b, m[14], 15, 0xab9423a7)
-    b = II(b, c, d, a, m[5],  21, 0xfc93a039)
-    a = II(a, b, c, d, m[12],  6, 0x655b59c3)
-    d = II(d, a, b, c, m[3],  10, 0x8f0ccc92)
-    c = II(c, d, a, b, m[10], 15, 0xffeff47d)
-    b = II(b, c, d, a, m[1],  21, 0x85845dd1)
-    a = II(a, b, c, d, m[8],   6, 0x6fa87e4f)
-    d = II(d, a, b, c, m[15], 10, 0xfe2ce6e0)
-    c = II(c, d, a, b, m[6],  15, 0xa3014314)
-    b = II(b, c, d, a, m[13], 21, 0x4e0811a1)
-    a = II(a, b, c, d, m[4],   6, 0xf7537e82)
-    d = II(d, a, b, c, m[11], 10, 0xbd3af235)
-    c = II(c, d, a, b, m[2],  15, 0x2ad7d2bb)
-    b = II(b, c, d, a, m[9],  21, 0xeb86d391)
-
-    ctx.state[0] += a
-    ctx.state[1] += b
-    ctx.state[2] += c
-    ctx.state[3] += d
-}
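
The eight byte stores at the end of the deleted `final` serialize the 64-bit message bit-length little-endian into bytes 56..63 of the final block. The same step as a small loop, for clarity (illustrative only, not part of the deleted file):

put_bitlen_le :: proc(data: []byte, bitlen: u64) {
	// data is the 64-byte block; bytes 56..63 receive the length, least significant byte first.
	for i in 0 ..< 8 {
		data[56 + i] = byte(bitlen >> uint(8 * i))
	}
}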

+ 4 - 4
core/crypto/poly1305/poly1305.odin

@@ -1,8 +1,8 @@
 package poly1305
 
 import "core:crypto"
-import "core:crypto/util"
 import field "core:crypto/_fiat/field_poly1305"
+import "core:encoding/endian"
 import "core:mem"
 
 KEY_SIZE :: 32
@@ -52,8 +52,8 @@ init :: proc (ctx: ^Context, key: []byte) {
 
 	// r = le_bytes_to_num(key[0..15])
 	// r = clamp(r) (r &= 0xffffffc0ffffffc0ffffffc0fffffff)
-	tmp_lo := util.U64_LE(key[0:8]) & 0x0ffffffc0fffffff
-	tmp_hi := util.U64_LE(key[8:16]) & 0xffffffc0ffffffc
+	tmp_lo := endian.unchecked_get_u64le(key[0:]) & 0x0ffffffc0fffffff
+	tmp_hi := endian.unchecked_get_u64le(key[8:]) & 0xffffffc0ffffffc
 	field.fe_from_u64s(&ctx._r, tmp_lo, tmp_hi)
 
 	// s = le_bytes_to_num(key[16..31])
@@ -151,7 +151,7 @@ _blocks :: proc (ctx: ^Context, msg: []byte, final := false) {
 	data_len := len(data)
 	for data_len >= _BLOCK_SIZE {
 		// n = le_bytes_to_num(msg[((i-1)*16)..*i*16] | [0x01])
-		field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte, false)
+		field.fe_from_bytes(&n, data[:_BLOCK_SIZE], final_byte)
 
 		// a += n
 		field.fe_add(field.fe_relax_cast(&ctx._a), &ctx._a, &n) // _a unreduced
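
The two masks in the init hunk above encode the RFC 8439 clamp r &= 0x0ffffffc0ffffffc0ffffffc0fffffff, split across two little-endian 64-bit limbs. The load-and-clamp in isolation, using the same core:encoding/endian call the new code uses (the helper name is illustrative):

package examples

import "core:encoding/endian"

// Load r from the first 16 key bytes and clamp it (RFC 8439, section 2.5.1).
clamp_r :: proc(key: []byte) -> (lo, hi: u64) {
	lo = endian.unchecked_get_u64le(key[0:]) & 0x0ffffffc0fffffff
	hi = endian.unchecked_get_u64le(key[8:]) & 0x0ffffffc0ffffffc
	return
}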

+ 0 - 919
core/crypto/ripemd/ripemd.odin

@@ -1,919 +0,0 @@
-package ripemd
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation for the RIPEMD hashing algorithm as defined in <https://homes.esat.kuleuven.be/~bosselae/ripemd160.html>
-*/
-
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_160 :: 20
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_320 :: 40
-
-// hash_string_128 will hash the given input and return the
-// computed hash
-hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128(transmute([]byte)(data))
-}
-
-// hash_bytes_128 will hash the given input and return the
-// computed hash
-hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Ripemd128_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: Ripemd128_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_128 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: Ripemd128_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128 will read the file provided by the given handle
-// and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128 :: proc {
-    hash_stream_128,
-    hash_file_128,
-    hash_bytes_128,
-    hash_string_128,
-    hash_bytes_to_buffer_128,
-    hash_string_to_buffer_128,
-}
-
-// hash_string_160 will hash the given input and return the
-// computed hash
-hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160(transmute([]byte)(data))
-}
-
-// hash_bytes_160 will hash the given input and return the
-// computed hash
-hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Ripemd160_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: Ripemd160_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_160 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: Ripemd160_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160 will read the file provided by the given handle
-// and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160 :: proc {
-    hash_stream_160,
-    hash_file_160,
-    hash_bytes_160,
-    hash_string_160,
-    hash_bytes_to_buffer_160,
-    hash_string_to_buffer_160,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Ripemd256_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Ripemd256_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Ripemd256_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_320 will hash the given input and return the
-// computed hash
-hash_string_320 :: proc(data: string) -> [DIGEST_SIZE_320]byte {
-    return hash_bytes_320(transmute([]byte)(data))
-}
-
-// hash_bytes_320 will hash the given input and return the
-// computed hash
-hash_bytes_320 :: proc(data: []byte) -> [DIGEST_SIZE_320]byte {
-    hash: [DIGEST_SIZE_320]byte
-    ctx: Ripemd320_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_320 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_320 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_320(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_320 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_320 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_320, "Size of destination buffer is smaller than the digest size")
-    ctx: Ripemd320_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream_320 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_320 :: proc(s: io.Stream) -> ([DIGEST_SIZE_320]byte, bool) {
-    hash: [DIGEST_SIZE_320]byte
-    ctx: Ripemd320_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_320 will read the file provided by the given handle
-// and compute a hash
-hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_320]byte, bool) {
-    if !load_at_once {
-        return hash_stream_320(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_320(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_320]byte{}, false
-}
-
-hash_320 :: proc {
-    hash_stream_320,
-    hash_file_320,
-    hash_bytes_320,
-    hash_string_320,
-    hash_bytes_to_buffer_320,
-    hash_string_to_buffer_320,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^$T) {
-    when T == Ripemd128_Context {
-        ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3
-    } else when T == Ripemd160_Context {
-        ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4
-    } else when T == Ripemd256_Context {
-        ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3] = S0, S1, S2, S3
-        ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7] = S5, S6, S7, S8
-    } else when T == Ripemd320_Context {
-        ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4] = S0, S1, S2, S3, S4
-        ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9] = S5, S6, S7, S8, S9
-    }
-}
-
-update :: proc(ctx: ^$T, data: []byte) {
-    ctx.tc += u64(len(data))
-    data := data
-    if ctx.nx > 0 {
-        n := len(data)
-
-        when T == Ripemd128_Context {
-            if n > RIPEMD_128_BLOCK_SIZE - ctx.nx {
-                n = RIPEMD_128_BLOCK_SIZE - ctx.nx
-            }
-        } else when T == Ripemd160_Context {
-            if n > RIPEMD_160_BLOCK_SIZE - ctx.nx {
-                n = RIPEMD_160_BLOCK_SIZE - ctx.nx
-            }
-        } else when T == Ripemd256_Context {
-            if n > RIPEMD_256_BLOCK_SIZE - ctx.nx {
-                n = RIPEMD_256_BLOCK_SIZE - ctx.nx
-            }
-        } else when T == Ripemd320_Context {
-            if n > RIPEMD_320_BLOCK_SIZE - ctx.nx {
-                n = RIPEMD_320_BLOCK_SIZE - ctx.nx
-            }
-        }
-
-        for i := 0; i < n; i += 1 {
-            ctx.x[ctx.nx + i] = data[i]
-        }
-
-        ctx.nx += n
-        when T == Ripemd128_Context {
-            if ctx.nx == RIPEMD_128_BLOCK_SIZE {
-                block(ctx, ctx.x[0:])
-                ctx.nx = 0
-            }
-        } else when T == Ripemd160_Context {
-            if ctx.nx == RIPEMD_160_BLOCK_SIZE {
-                block(ctx, ctx.x[0:])
-                ctx.nx = 0
-            }
-        } else when T == Ripemd256_Context {
-            if ctx.nx == RIPEMD_256_BLOCK_SIZE {
-                block(ctx, ctx.x[0:])
-                ctx.nx = 0
-            }
-        } else when T == Ripemd320_Context {
-            if ctx.nx == RIPEMD_320_BLOCK_SIZE {
-                block(ctx, ctx.x[0:])
-                ctx.nx = 0
-            }
-        }
-        data = data[n:]
-    }
-    n := block(ctx, data)
-    data = data[n:]
-    if len(data) > 0 {
-        ctx.nx = copy(ctx.x[:], data)
-    }
-}
-
-final :: proc(ctx: ^$T, hash: []byte) {
-    d := ctx
-    tc := d.tc
-    tmp: [64]byte
-    tmp[0] = 0x80
-
-    if tc % 64 < 56 {
-        update(d, tmp[0:56 - tc % 64])
-    } else {
-        update(d, tmp[0:64 + 56 - tc % 64])
-    }
-
-    tc <<= 3
-    for i : u32 = 0; i < 8; i += 1 {
-        tmp[i] = byte(tc >> (8 * i))
-    }
-
-    update(d, tmp[0:8])
-
-    when T == Ripemd128_Context {
-        size :: RIPEMD_128_SIZE
-    } else when T == Ripemd160_Context {
-        size :: RIPEMD_160_SIZE
-    } else when T == Ripemd256_Context {
-        size :: RIPEMD_256_SIZE
-    } else when T == Ripemd320_Context {
-        size :: RIPEMD_320_SIZE
-    }
-
-    digest: [size]byte
-    for s, i in d.s {
-        digest[i * 4]     = byte(s)
-        digest[i * 4 + 1] = byte(s >> 8)
-        digest[i * 4 + 2] = byte(s >> 16)
-        digest[i * 4 + 3] = byte(s >> 24)
-    }
-    copy(hash[:], digest[:])
-}
-
-
-/*
-    RIPEMD implementation
-*/
-
-Ripemd128_Context :: struct {
-	s:  [4]u32,
-	x:  [RIPEMD_128_BLOCK_SIZE]byte,
-	nx: int,
-	tc: u64,
-}
-
-Ripemd160_Context :: struct {
-	s:  [5]u32,
-	x:  [RIPEMD_160_BLOCK_SIZE]byte,
-	nx: int,
-	tc: u64,
-}
-
-Ripemd256_Context :: struct {
-	s:  [8]u32,
-	x:  [RIPEMD_256_BLOCK_SIZE]byte,
-	nx: int,
-	tc: u64,
-}
-
-Ripemd320_Context :: struct {
-	s:  [10]u32,
-	x:  [RIPEMD_320_BLOCK_SIZE]byte,
-	nx: int,
-	tc: u64,
-}
-
-RIPEMD_128_SIZE       :: 16
-RIPEMD_128_BLOCK_SIZE :: 64
-RIPEMD_160_SIZE       :: 20
-RIPEMD_160_BLOCK_SIZE :: 64
-RIPEMD_256_SIZE       :: 32
-RIPEMD_256_BLOCK_SIZE :: 64
-RIPEMD_320_SIZE       :: 40
-RIPEMD_320_BLOCK_SIZE :: 64
-
-S0 :: 0x67452301
-S1 :: 0xefcdab89
-S2 :: 0x98badcfe
-S3 :: 0x10325476
-S4 :: 0xc3d2e1f0
-S5 :: 0x76543210
-S6 :: 0xfedcba98
-S7 :: 0x89abcdef
-S8 :: 0x01234567
-S9 :: 0x3c2d1e0f
-
-RIPEMD_128_N0 := [64]uint {
-	0, 1,  2,  3,  4,  5,  6,  7, 8,  9, 10, 11, 12, 13, 14, 15,
-	7, 4,  13, 1,  10, 6,  15, 3, 12, 0, 9,  5,  2,  14, 11, 8,
-	3, 10, 14, 4,  9,  15, 8,  1, 2,  7, 0,  6,  13, 11, 5,  12,
-	1, 9,  11, 10, 0,  8,  12, 4, 13, 3, 7,  15, 14, 5,  6,  2,
-}
-
-RIPEMD_128_R0 := [64]uint {
-	11, 14, 15, 12, 5,  8,  7,  9,  11, 13, 14, 15, 6,  7,  9,  8,
-	7,  6,  8,  13, 11, 9,  7,  15, 7,  12, 15, 9,  11, 7,  13, 12,
-	11, 13, 6,  7,  14, 9,  13, 15, 14, 8,  13, 6,  5,  12, 7,  5,
-	11, 12, 14, 15, 14, 15, 9,  8,  9,  14, 5,  6,  8,  6,  5,  12,
-}
-
-RIPEMD_128_N1 := [64]uint {
-	5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
-	6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
-	15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
-	8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
-}
-
-RIPEMD_128_R1 := [64]uint {
-	8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
-	9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
-	9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
-	15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
-}
-
-RIPEMD_160_N0 := [80]uint {
-	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
-	7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8,
-	3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12,
-	1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2,
-	4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13,
-}
-
-RIPEMD_160_R0 := [80]uint {
-	11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8,
-	7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12,
-	11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5,
-	11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12,
-	9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6,
-}
-
-RIPEMD_160_N1 := [80]uint {
-	5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
-	6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
-	15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
-	8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
-	12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11,
-}
-
-RIPEMD_160_R1 := [80]uint {
-	8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
-	9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
-	9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
-	15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
-	8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11,
-}
-
-block :: #force_inline proc(ctx: ^$T, p: []byte) -> int {
-    when T == Ripemd128_Context {
-        return ripemd_128_block(ctx, p)
-    } else when T == Ripemd160_Context {
-        return ripemd_160_block(ctx, p)
-    } else when T == Ripemd256_Context {
-        return ripemd_256_block(ctx, p)
-    } else when T == Ripemd320_Context {
-        return ripemd_320_block(ctx, p)
-    }
-}
-
-ripemd_128_block :: proc(ctx: ^$T, p: []byte) -> int {
-	n := 0
-	x: [16]u32 = ---
-	alpha: u32 = ---
-	p := p
-	for len(p) >= RIPEMD_128_BLOCK_SIZE {
-		a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3]
-		aa, bb, cc, dd := a, b, c, d
-		for i,j := 0, 0; i < 16; i, j = i+1, j+4 {
-			x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
-		}
-		i := 0
-		for i < 16 {
-			alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]]
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		for i < 32 {
-			alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		for i < 48 {
-			alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		for i < 64 {
-			alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]]
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		c = ctx.s[1] + c + dd
-		ctx.s[1] = ctx.s[2] + d + aa
-		ctx.s[2] = ctx.s[3] + a + bb
-		ctx.s[3] = ctx.s[0] + b + cc
-		ctx.s[0] = c
-		p = p[RIPEMD_128_BLOCK_SIZE:]
-		n += RIPEMD_128_BLOCK_SIZE
-	}
-	return n
-}
-
-ripemd_160_block :: proc(ctx: ^$T, p: []byte) -> int {
-	n := 0
-	x: [16]u32 = ---
-	alpha, beta: u32 = ---, ---
-	p := p
-	for len(p) >= RIPEMD_160_BLOCK_SIZE {
-		a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4]
-		aa, bb, cc, dd, ee := a, b, c, d, e
-		for i,j := 0, 0; i < 16; i, j = i+1, j+4 {
-			x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
-		}
-		i := 0
-		for i < 16 {
-			alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]]
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		for i < 32 {
-			alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		for i < 48 {
-			alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		for i < 64 {
-			alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		for i < 80 {
-			alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]]
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		dd += c + ctx.s[1]
-		ctx.s[1] = ctx.s[2] + d + ee
-		ctx.s[2] = ctx.s[3] + e + aa
-		ctx.s[3] = ctx.s[4] + a + bb
-		ctx.s[4] = ctx.s[0] + b + cc
-		ctx.s[0] = dd
-		p = p[RIPEMD_160_BLOCK_SIZE:]
-		n += RIPEMD_160_BLOCK_SIZE
-	}
-	return n
-}
-
-ripemd_256_block :: proc(ctx: ^$T, p: []byte) -> int {
-	n := 0
-	x: [16]u32 = ---
-	alpha: u32 = ---
-	p := p
-	for len(p) >= RIPEMD_256_BLOCK_SIZE {
-		a, b, c, d := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3]
-		aa, bb, cc, dd := ctx.s[4], ctx.s[5], ctx.s[6], ctx.s[7]
-		for i,j := 0, 0; i < 16; i, j = i+1, j+4 {
-			x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
-		}
-		i := 0
-		for i < 16 {
-			alpha = a + (b ~ c ~ d) + x[RIPEMD_128_N0[i]]
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (bb & dd | cc &~ dd) + x[RIPEMD_128_N1[i]] + 0x50a28be6
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		t := a
-		a = aa
-		aa = t
-		for i < 32 {
-			alpha = a + (d ~ (b & (c~d))) + x[RIPEMD_128_N0[i]] + 0x5a827999
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (dd ~ (bb | ~cc)) + x[RIPEMD_128_N1[i]] + 0x5c4dd124
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		t = b
-		b = bb
-		bb = t
-		for i < 48 {
-			alpha = a + (d ~ (b | ~c)) + x[RIPEMD_128_N0[i]] + 0x6ed9eba1
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (dd ~ (bb & (cc~dd))) + x[RIPEMD_128_N1[i]] + 0x6d703ef3
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		t = c
-		c = cc
-		cc = t
-		for i < 64 {
-			alpha = a + (c ~ (d & (b~c))) + x[RIPEMD_128_N0[i]] + 0x8f1bbcdc
-			s := int(RIPEMD_128_R0[i])
-			alpha = util.ROTL32(alpha, s)
-			a, b, c, d = d, alpha, b, c
-			alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_128_N1[i]]
-			s = int(RIPEMD_128_R1[i])
-			alpha = util.ROTL32(alpha, s)
-			aa, bb, cc, dd = dd, alpha, bb, cc
-			i += 1
-		}
-		t = d
-		d = dd
-		dd = t
-		ctx.s[0] += a
-		ctx.s[1] += b
-		ctx.s[2] += c
-		ctx.s[3] += d
-		ctx.s[4] += aa
-		ctx.s[5] += bb
-		ctx.s[6] += cc
-		ctx.s[7] += dd
-		p = p[RIPEMD_256_BLOCK_SIZE:]
-		n += RIPEMD_256_BLOCK_SIZE
-	}
-	return n
-}
-
-ripemd_320_block :: proc(ctx: ^$T, p: []byte) -> int {
-	n := 0
-	x: [16]u32 = ---
-	alpha, beta: u32 = ---, ---
-	p := p
-	for len(p) >= RIPEMD_320_BLOCK_SIZE {
-		a, b, c, d, e := ctx.s[0], ctx.s[1], ctx.s[2], ctx.s[3], ctx.s[4]
-		aa, bb, cc, dd, ee := ctx.s[5], ctx.s[6], ctx.s[7], ctx.s[8], ctx.s[9]
-		for i,j := 0, 0; i < 16; i, j = i+1, j+4 {
-			x[i] = u32(p[j]) | u32(p[j+1])<<8 | u32(p[j+2])<<16 | u32(p[j+3])<<24
-		}
-		i := 0
-		for i < 16 {
-			alpha = a + (b ~ c ~ d) + x[RIPEMD_160_N0[i]]
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb ~ (cc | ~dd)) + x[RIPEMD_160_N1[i]] + 0x50a28be6
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		t := b
-		b = bb
-		bb = t
-		for i < 32 {
-			alpha = a + (b&c | ~b&d) + x[RIPEMD_160_N0[i]] + 0x5a827999
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb&dd | cc&~dd) + x[RIPEMD_160_N1[i]] + 0x5c4dd124
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		t = d
-		d = dd
-		dd = t
-		for i < 48 {
-			alpha = a + (b | ~c ~ d) + x[RIPEMD_160_N0[i]] + 0x6ed9eba1
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb | ~cc ~ dd) + x[RIPEMD_160_N1[i]] + 0x6d703ef3
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		t = a
-		a = aa
-		aa = t
-		for i < 64 {
-			alpha = a + (b&d | c&~d) + x[RIPEMD_160_N0[i]] + 0x8f1bbcdc
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb&cc | ~bb&dd) + x[RIPEMD_160_N1[i]] + 0x7a6d76e9
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		t = c
-		c = cc
-		cc = t
-		for i < 80 {
-			alpha = a + (b ~ (c | ~d)) + x[RIPEMD_160_N0[i]] + 0xa953fd4e
-			s := int(RIPEMD_160_R0[i])
-			alpha = util.ROTL32(alpha, s) + e
-			beta = util.ROTL32(c, 10)
-			a, b, c, d, e = e, alpha, b, beta, d
-			alpha = aa + (bb ~ cc ~ dd) + x[RIPEMD_160_N1[i]]
-			s = int(RIPEMD_160_R1[i])
-			alpha = util.ROTL32(alpha, s) + ee
-			beta = util.ROTL32(cc, 10)
-			aa, bb, cc, dd, ee = ee, alpha, bb, beta, dd
-			i += 1
-		}
-		t = e
-		e = ee
-		ee = t
-		ctx.s[0] += a
-		ctx.s[1] += b
-		ctx.s[2] += c
-		ctx.s[3] += d
-		ctx.s[4] += e
-		ctx.s[5] += aa
-		ctx.s[6] += bb
-		ctx.s[7] += cc
-		ctx.s[8] += dd
-		ctx.s[9] += ee
-		p = p[RIPEMD_320_BLOCK_SIZE:]
-		n += RIPEMD_320_BLOCK_SIZE
-	}
-	return n
-}
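
The deleted package shares one init/update/final across all four digest sizes via Odin's parametric polymorphism: each proc takes ^$T and dispatches at compile time with `when T == ...` blocks, as in `init` and `block` above. The pattern in isolation (the context types here are illustrative):

Ctx_A :: struct { s: [4]u32 }
Ctx_B :: struct { s: [8]u32 }

reset :: proc(ctx: ^$T) {
	when T == Ctx_A {
		ctx.s = {0, 1, 2, 3}
	} else when T == Ctx_B {
		ctx.s = {0, 1, 2, 3, 4, 5, 6, 7}
	} else {
		#panic("reset: unsupported context type")
	}
}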

+ 0 - 246
core/crypto/sha1/sha1.odin

@@ -1,246 +0,0 @@
-package sha1
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
-*/
-
-import "core:mem"
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 20
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: Sha1_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Sha1_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: Sha1_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Sha1_Context) {
-	ctx.state[0] = 0x67452301
-	ctx.state[1] = 0xefcdab89
-	ctx.state[2] = 0x98badcfe
-	ctx.state[3] = 0x10325476
-	ctx.state[4] = 0xc3d2e1f0
-	ctx.k[0]     = 0x5a827999
-	ctx.k[1]     = 0x6ed9eba1
-	ctx.k[2]     = 0x8f1bbcdc
-	ctx.k[3]     = 0xca62c1d6
-}
-
-update :: proc(ctx: ^Sha1_Context, data: []byte) {
-	for i := 0; i < len(data); i += 1 {
-		ctx.data[ctx.datalen] = data[i]
-		ctx.datalen += 1
-		if ctx.datalen == BLOCK_SIZE {
-			transform(ctx, ctx.data[:])
-			ctx.bitlen += 512
-			ctx.datalen = 0
-		}
-	}
-}
-
-final :: proc(ctx: ^Sha1_Context, hash: []byte) {
-	i := ctx.datalen
-
-	if ctx.datalen < 56 {
-		ctx.data[i] = 0x80
-		i += 1
-		for i < 56 {
-			ctx.data[i] = 0x00
-			i += 1
-		}
-	} else {
-		ctx.data[i] = 0x80
-		i += 1
-		for i < BLOCK_SIZE {
-			ctx.data[i] = 0x00
-			i += 1
-		}
-		transform(ctx, ctx.data[:])
-		mem.set(&ctx.data, 0, 56)
-	}
-
-	ctx.bitlen  += u64(ctx.datalen * 8)
-	ctx.data[63] = u8(ctx.bitlen)
-	ctx.data[62] = u8(ctx.bitlen >> 8)
-	ctx.data[61] = u8(ctx.bitlen >> 16)
-	ctx.data[60] = u8(ctx.bitlen >> 24)
-	ctx.data[59] = u8(ctx.bitlen >> 32)
-	ctx.data[58] = u8(ctx.bitlen >> 40)
-	ctx.data[57] = u8(ctx.bitlen >> 48)
-	ctx.data[56] = u8(ctx.bitlen >> 56)
-	transform(ctx, ctx.data[:])
-
-	for j: u32 = 0; j < 4; j += 1 {
-		hash[j]      = u8(ctx.state[0] >> (24 - j * 8)) & 0x000000ff
-		hash[j + 4]  = u8(ctx.state[1] >> (24 - j * 8)) & 0x000000ff
-		hash[j + 8]  = u8(ctx.state[2] >> (24 - j * 8)) & 0x000000ff
-		hash[j + 12] = u8(ctx.state[3] >> (24 - j * 8)) & 0x000000ff
-		hash[j + 16] = u8(ctx.state[4] >> (24 - j * 8)) & 0x000000ff
-	}
-}
-
-/*
-    SHA1 implementation
-*/
-
-BLOCK_SIZE  :: 64
-
-Sha1_Context :: struct {
-    data:    [BLOCK_SIZE]byte,
-    datalen: u32,
-    bitlen:  u64,
-    state:   [5]u32,
-    k:       [4]u32,
-}
-
-transform :: proc(ctx: ^Sha1_Context, data: []byte) {
-	a, b, c, d, e, i, j, t: u32
-	m: [80]u32
-
-	for i, j = 0, 0; i < 16; i += 1 {
-		m[i] = (u32(data[j]) << 24) | (u32(data[j + 1]) << 16) | (u32(data[j + 2]) << 8) | u32(data[j + 3])
-		j += 4
-	}
-	for i < 80 {
-		m[i] = (m[i - 3] ~ m[i - 8] ~ m[i - 14] ~ m[i - 16])
-		m[i] = (m[i] << 1) | (m[i] >> 31)
-		i += 1
-	}
-
-	a = ctx.state[0]
-	b = ctx.state[1]
-	c = ctx.state[2]
-	d = ctx.state[3]
-	e = ctx.state[4]
-
-	for i = 0; i < 20; i += 1 {
-		t = util.ROTL32(a, 5) + ((b & c) ~ (~b & d)) + e + ctx.k[0] + m[i]
-		e = d
-		d = c
-		c = util.ROTL32(b, 30)
-		b = a
-		a = t
-	}
-	for i < 40 {
-		t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[1] + m[i]
-		e = d
-		d = c
-		c = util.ROTL32(b, 30)
-		b = a
-		a = t
-		i += 1
-	}
-	for i < 60 {
-		t = util.ROTL32(a, 5) + ((b & c) ~ (b & d) ~ (c & d)) + e + ctx.k[2] + m[i]
-		e = d
-		d = c
-		c = util.ROTL32(b, 30)
-		b = a
-		a = t
-		i += 1
-	}
-	for i < 80 {
-		t = util.ROTL32(a, 5) + (b ~ c ~ d) + e + ctx.k[3] + m[i]
-		e = d
-		d = c
-		c = util.ROTL32(b, 30)
-		b = a
-		a = t
-		i += 1
-	}
-
-	ctx.state[0] += a
-	ctx.state[1] += b
-	ctx.state[2] += c
-	ctx.state[3] += d
-	ctx.state[4] += e
-}
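
For reference, the deleted one-shot helpers composed like this (a minimal sketch of the pre-removal API; the error handling and hex printing are illustrative):

package examples

import "core:fmt"
import "core:os"
import "core:crypto/sha1"

print_sha1_of_file :: proc(path: string) {
	fd, err := os.open(path)
	if err != os.ERROR_NONE {
		return
	}
	defer os.close(fd)

	if digest, ok := sha1.hash_file(fd); ok {
		for b in digest {
			fmt.printf("%02x", b)
		}
		fmt.println()
	}
}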

File diff suppressed because it is too large
+ 529 - 409
core/crypto/sha2/sha2.odin


+ 188 - 184
core/crypto/sha3/sha3.odin

@@ -11,8 +11,8 @@ package sha3
     If you wish to compute a Keccak hash, you can use the keccak package, it will use the original padding.
 */
 
-import "core:os"
 import "core:io"
+import "core:os"
 
 import "../_sha3"
 
@@ -28,333 +28,337 @@ DIGEST_SIZE_512 :: 64
 // hash_string_224 will hash the given input and return the
 // hash_string_224 will hash the given input and return the
 // computed hash
 // computed hash
 hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-    return hash_bytes_224(transmute([]byte)(data))
+	return hash_bytes_224(transmute([]byte)(data))
 }
 }
 
 
 // hash_bytes_224 will hash the given input and return the
 // hash_bytes_224 will hash the given input and return the
 // computed hash
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_224]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 }
 
 
 // hash_string_to_buffer_224 will hash the given input and assign the
 // hash_string_to_buffer_224 will hash the given input and assign the
 // computed hash to the second parameter.
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
 }
 }
 
 
 // hash_bytes_to_buffer_224 will hash the given input and write the
 // hash_bytes_to_buffer_224 will hash the given input and write the
 // computed hash into the second parameter.
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 }
 
 
 // hash_stream_224 will read the stream in chunks and compute a
 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-    hash: [DIGEST_SIZE_224]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_224
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_224]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_224
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 }
 
 
 // hash_file_224 will read the file provided by the given handle
 // hash_file_224 will read the file provided by the given handle
 // and compute a hash
 // and compute a hash
 hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
 hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-    if !load_at_once {
-        return hash_stream_224(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_224(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_224]byte{}, false
+	if !load_at_once {
+		return hash_stream_224(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_224(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_224]byte{}, false
 }
 
 hash_224 :: proc {
-    hash_stream_224,
-    hash_file_224,
-    hash_bytes_224,
-    hash_string_224,
-    hash_bytes_to_buffer_224,
-    hash_string_to_buffer_224,
+	hash_stream_224,
+	hash_file_224,
+	hash_bytes_224,
+	hash_string_224,
+	hash_bytes_to_buffer_224,
+	hash_string_to_buffer_224,
 }
 
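A minimal usage sketch of the convenience wrappers above, assuming the package is imported as core:crypto/sha3 (the example proc and inputs are illustrative, not part of this change):

import "core:crypto/sha3"
import "core:fmt"

example_sha3_224 :: proc() {
	// One-shot hashing; the digest is returned as a fixed-size array.
	digest := sha3.hash_string_224("hello")
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()

	// Hashing into a caller-provided buffer, which must hold at least
	// DIGEST_SIZE_224 (28) bytes.
	buf: [sha3.DIGEST_SIZE_224]byte
	sha3.hash_string_to_buffer_224("hello", buf[:])
}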
 // hash_string_256 will hash the given input and return the
 // computed hash
 hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
+	return hash_bytes_256(transmute([]byte)(data))
 }
 
 // hash_bytes_256 will hash the given input and return the
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer_256 will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer_256 will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
 // and compute a hash
 hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
+	if !load_at_once {
+		return hash_stream_256(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_256(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_256]byte{}, false
 }
 
 hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
+	hash_stream_256,
+	hash_file_256,
+	hash_bytes_256,
+	hash_string_256,
+	hash_bytes_to_buffer_256,
+	hash_string_to_buffer_256,
 }
 
 // hash_string_384 will hash the given input and return the
 // computed hash
 hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-    return hash_bytes_384(transmute([]byte)(data))
+	return hash_bytes_384(transmute([]byte)(data))
 }
 
 // hash_bytes_384 will hash the given input and return the
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_384]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer_384 will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer_384 will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-    hash: [DIGEST_SIZE_384]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_384
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_384]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_384
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_384 will read the file provided by the given handle
 // and compute a hash
 hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-    if !load_at_once {
-        return hash_stream_384(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_384(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_384]byte{}, false
+	if !load_at_once {
+		return hash_stream_384(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_384(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_384]byte{}, false
 }
 
 hash_384 :: proc {
-    hash_stream_384,
-    hash_file_384,
-    hash_bytes_384,
-    hash_string_384,
-    hash_bytes_to_buffer_384,
-    hash_string_to_buffer_384,
+	hash_stream_384,
+	hash_file_384,
+	hash_bytes_384,
+	hash_string_384,
+	hash_bytes_to_buffer_384,
+	hash_string_to_buffer_384,
 }
 
 // hash_string_512 will hash the given input and return the
 // computed hash
 hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
+	return hash_bytes_512(transmute([]byte)(data))
 }
 
 // hash_bytes_512 will hash the given input and return the
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_512]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer_512 will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer_512 will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.final(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_512
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.final(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_512]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_512
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_512 will read the file provided by the given handle
 // and compute a hash
 hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
+	if !load_at_once {
+		return hash_stream_512(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_512(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_512]byte{}, false
 }
 
 hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
+	hash_stream_512,
+	hash_file_512,
+	hash_bytes_512,
+	hash_string_512,
+	hash_bytes_to_buffer_512,
+	hash_string_to_buffer_512,
 }
 
 /*
     Low level API
 */
 
-Sha3_Context :: _sha3.Sha3_Context
+Context :: _sha3.Sha3_Context
 
-init :: proc(ctx: ^_sha3.Sha3_Context) {
-    _sha3.init(ctx)
+init :: proc(ctx: ^Context) {
+	_sha3.init(ctx)
 }
 
-update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
-    _sha3.update(ctx, data)
+update :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(ctx, data)
 }
 
-final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
-    _sha3.final(ctx, hash)
+final :: proc(ctx: ^Context, hash: []byte) {
+	_sha3.final(ctx, hash)
 }
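A minimal streaming sketch against the renamed low-level API, assuming the same import path; note that mdlen selects the digest size and must be set before init, exactly as the high-level wrappers above do:

import "core:crypto/sha3"

sha3_256_streaming :: proc(chunks: [][]byte) -> [sha3.DIGEST_SIZE_256]byte {
	digest: [sha3.DIGEST_SIZE_256]byte

	// mdlen selects the digest size and must be set before init.
	ctx: sha3.Context
	ctx.mdlen = sha3.DIGEST_SIZE_256
	sha3.init(&ctx)
	for chunk in chunks {
		sha3.update(&ctx, chunk)
	}
	sha3.final(&ctx, digest[:])
	return digest
}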

+ 102 - 103
core/crypto/shake/shake.odin

@@ -9,10 +9,13 @@ package shake
 
     Interface for the SHAKE hashing algorithm.
     The SHA3 functionality can be found in package sha3.
+
+    TODO: This should provide an incremental squeeze interface, in addition
+    to the one-shot final call.
 */
 
-import "core:os"
 import "core:io"
+import "core:os"
 
 import "../_sha3"
 
@@ -26,182 +29,178 @@ DIGEST_SIZE_256 :: 32
 // hash_string_128 will hash the given input and return the
 // computed hash
 hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128(transmute([]byte)(data))
+	return hash_bytes_128(transmute([]byte)(data))
 }
 
 // hash_bytes_128 will hash the given input and return the
 // computed hash
 hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_128
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_128]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_128
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer_128 will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer_128 will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_128
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_128
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_128 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_128
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_128]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_128
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_128 will read the file provided by the given handle
 // and compute a hash
 hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
+	if !load_at_once {
+		return hash_stream_128(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_128(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_128]byte{}, false
 }
 
 hash_128 :: proc {
-    hash_stream_128,
-    hash_file_128,
-    hash_bytes_128,
-    hash_string_128,
-    hash_bytes_to_buffer_128,
-    hash_string_to_buffer_128,
+	hash_stream_128,
+	hash_file_128,
+	hash_bytes_128,
+	hash_string_128,
+	hash_bytes_to_buffer_128,
+	hash_string_to_buffer_128,
 }
 
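A short usage sketch, assuming the package is imported as core:crypto/shake (proc name illustrative):

import "core:crypto/shake"

shake128_digest :: proc(msg: string) -> [shake.DIGEST_SIZE_128]byte {
	// SHAKE-128 with the default 16-byte output size.
	return shake.hash_string_128(msg)
}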
 // hash_string_256 will hash the given input and return the
 // computed hash
 hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
+	return hash_bytes_256(transmute([]byte)(data))
 }
 
 // hash_bytes_256 will hash the given input and return the
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer_256 will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer_256 will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    _sha3.update(&ctx, data)
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash)
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: _sha3.Sha3_Context
-    ctx.mdlen = DIGEST_SIZE_256
-    _sha3.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _sha3.update(&ctx, buf[:read])
-        } 
-    }
-    _sha3.shake_xof(&ctx)
-    _sha3.shake_out(&ctx, hash[:])
-    return hash, true
+	hash: [DIGEST_SIZE_256]byte
+	ctx: Context
+	ctx.mdlen = DIGEST_SIZE_256
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
 // and compute a hash
 hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
+	if !load_at_once {
+		return hash_stream_256(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes_256(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE_256]byte{}, false
 }
 
 hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
+	hash_stream_256,
+	hash_file_256,
+	hash_bytes_256,
+	hash_string_256,
+	hash_bytes_to_buffer_256,
+	hash_string_to_buffer_256,
 }
 
 /*
     Low level API
 */
 
-Shake_Context :: _sha3.Sha3_Context
+Context :: _sha3.Sha3_Context
 
-init :: proc(ctx: ^_sha3.Sha3_Context) {
-    _sha3.init(ctx)
+init :: proc(ctx: ^Context) {
+	_sha3.init(ctx)
 }
 
-update :: proc "contextless" (ctx: ^_sha3.Sha3_Context, data: []byte) {
-    _sha3.update(ctx, data)
+update :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(ctx, data)
 }
 
-final :: proc "contextless" (ctx: ^_sha3.Sha3_Context, hash: []byte) {
-    _sha3.shake_xof(ctx)
-    _sha3.shake_out(ctx, hash[:])
+final :: proc(ctx: ^Context, hash: []byte) {
+	_sha3.shake_xof(ctx)
+	_sha3.shake_out(ctx, hash[:])
 }
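A hedged sketch of the low-level XOF use, assuming the import path; the 64-byte output length is an arbitrary illustration of the variable-length squeeze:

import "core:crypto/shake"

shake256_xof :: proc(msg: []byte) -> [64]byte {
	// The context is the shared Keccak state from package _sha3;
	// mdlen selects the variant (16 -> SHAKE-128, 32 -> SHAKE-256).
	out: [64]byte
	ctx: shake.Context
	ctx.mdlen = shake.DIGEST_SIZE_256
	shake.init(&ctx)
	shake.update(&ctx, msg)
	// final switches the state into squeeze mode (shake_xof) and reads
	// len(out) bytes (shake_out). Per the TODO above there is no
	// incremental squeeze yet, so call it exactly once.
	shake.final(&ctx, out[:])
	return out
}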

+ 209 - 181
core/crypto/siphash/siphash.odin

@@ -13,202 +13,200 @@ package siphash
 */
 
 import "core:crypto"
-import "core:crypto/util"
+import "core:encoding/endian"
+import "core:math/bits"
 
 /*
     High level API
 */
 
-KEY_SIZE    :: 16
+KEY_SIZE :: 16
 DIGEST_SIZE :: 8
 
 // sum_string_1_3 will hash the given message with the key and return
 // the computed hash as a u64
 sum_string_1_3 :: proc(msg, key: string) -> u64 {
-    return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key))
+	return sum_bytes_1_3(transmute([]byte)(msg), transmute([]byte)(key))
 }
 
 // sum_bytes_1_3 will hash the given message with the key and return
 // the computed hash as a u64
-sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 {
-    ctx: Context
-    hash: u64
-    init(&ctx, key, 1, 3)
-    update(&ctx, msg)
-    final(&ctx, &hash)
-    return hash
+sum_bytes_1_3 :: proc(msg, key: []byte) -> u64 {
+	ctx: Context
+	hash: u64
+	init(&ctx, key, 1, 3)
+	update(&ctx, msg)
+	final(&ctx, &hash)
+	return hash
 }
 
 // sum_string_to_buffer_1_3 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_string_to_buffer_1_3 :: proc(msg, key: string, dst: []byte) {
-    sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst)
+	sum_bytes_to_buffer_1_3(transmute([]byte)(msg), transmute([]byte)(key), dst)
 }
 
 // sum_bytes_to_buffer_1_3 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_bytes_to_buffer_1_3 :: proc(msg, key, dst: []byte) {
-    assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
-    hash  := sum_bytes_1_3(msg, key)
-    _collect_output(dst[:], hash)
+	hash := sum_bytes_1_3(msg, key)
+	_collect_output(dst[:], hash)
 }
 
 sum_1_3 :: proc {
-    sum_string_1_3,
-    sum_bytes_1_3,
-    sum_string_to_buffer_1_3,
-    sum_bytes_to_buffer_1_3,
+	sum_string_1_3,
+	sum_bytes_1_3,
+	sum_string_to_buffer_1_3,
+	sum_bytes_to_buffer_1_3,
 }
 
-// verify_u64_1_3 will check if the supplied tag matches with the output you 
+// verify_u64_1_3 will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_u64_1_3 :: proc (tag: u64 msg, key: []byte) -> bool {
-    return sum_bytes_1_3(msg, key) == tag
+verify_u64_1_3 :: proc(tag: u64, msg, key: []byte) -> bool {
+	return sum_bytes_1_3(msg, key) == tag
 }
 
-// verify_bytes will check if the supplied tag matches with the output you 
+// verify_bytes will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_bytes_1_3 :: proc (tag, msg, key: []byte) -> bool {
-    derived_tag: [8]byte
-    sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
-    return crypto.compare_constant_time(derived_tag[:], tag) == 1
+verify_bytes_1_3 :: proc(tag, msg, key: []byte) -> bool {
+	derived_tag: [8]byte
+	sum_bytes_to_buffer_1_3(msg, key, derived_tag[:])
+	return crypto.compare_constant_time(derived_tag[:], tag) == 1
 }
 
 verify_1_3 :: proc {
-    verify_bytes_1_3,
-    verify_u64_1_3,
+	verify_bytes_1_3,
+	verify_u64_1_3,
 }
 
 // sum_string_2_4 will hash the given message with the key and return
 // the computed hash as a u64
 sum_string_2_4 :: proc(msg, key: string) -> u64 {
-    return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key))
+	return sum_bytes_2_4(transmute([]byte)(msg), transmute([]byte)(key))
 }
 
 // sum_bytes_2_4 will hash the given message with the key and return
 // the computed hash as a u64
-sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 {
-    ctx: Context
-    hash: u64
-    init(&ctx, key, 2, 4)
-    update(&ctx, msg)
-    final(&ctx, &hash)
-    return hash
+sum_bytes_2_4 :: proc(msg, key: []byte) -> u64 {
+	ctx: Context
+	hash: u64
+	init(&ctx, key, 2, 4)
+	update(&ctx, msg)
+	final(&ctx, &hash)
+	return hash
 }
 
 // sum_string_to_buffer_2_4 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_string_to_buffer_2_4 :: proc(msg, key: string, dst: []byte) {
-    sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst)
+	sum_bytes_to_buffer_2_4(transmute([]byte)(msg), transmute([]byte)(key), dst)
 }
 
 // sum_bytes_to_buffer_2_4 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_bytes_to_buffer_2_4 :: proc(msg, key, dst: []byte) {
-    assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
-    hash  := sum_bytes_2_4(msg, key)
-    _collect_output(dst[:], hash)
+	hash := sum_bytes_2_4(msg, key)
+	_collect_output(dst[:], hash)
 }
 
 sum_2_4 :: proc {
-    sum_string_2_4,
-    sum_bytes_2_4,
-    sum_string_to_buffer_2_4,
-    sum_bytes_to_buffer_2_4,
+	sum_string_2_4,
+	sum_bytes_2_4,
+	sum_string_to_buffer_2_4,
+	sum_bytes_to_buffer_2_4,
 }
 
-sum_string           :: sum_string_2_4
-sum_bytes            :: sum_bytes_2_4
+sum_string :: sum_string_2_4
+sum_bytes :: sum_bytes_2_4
 sum_string_to_buffer :: sum_string_to_buffer_2_4
-sum_bytes_to_buffer  :: sum_bytes_to_buffer_2_4
+sum_bytes_to_buffer :: sum_bytes_to_buffer_2_4
 sum :: proc {
-    sum_string,
-    sum_bytes,
-    sum_string_to_buffer,
-    sum_bytes_to_buffer,
+	sum_string,
+	sum_bytes,
+	sum_string_to_buffer,
+	sum_bytes_to_buffer,
 }
 
-// verify_u64_2_4 will check if the supplied tag matches with the output you 
+// verify_u64_2_4 will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_u64_2_4 :: proc (tag: u64 msg, key: []byte) -> bool {
-    return sum_bytes_2_4(msg, key) == tag
+verify_u64_2_4 :: proc(tag: u64, msg, key: []byte) -> bool {
+	return sum_bytes_2_4(msg, key) == tag
 }
 
-// verify_bytes will check if the supplied tag matches with the output you 
+// verify_bytes will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_bytes_2_4 :: proc (tag, msg, key: []byte) -> bool {
-    derived_tag: [8]byte
-    sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
-    return crypto.compare_constant_time(derived_tag[:], tag) == 1
+verify_bytes_2_4 :: proc(tag, msg, key: []byte) -> bool {
+	derived_tag: [8]byte
+	sum_bytes_to_buffer_2_4(msg, key, derived_tag[:])
+	return crypto.compare_constant_time(derived_tag[:], tag) == 1
 }
 
 verify_2_4 :: proc {
-    verify_bytes_2_4,
-    verify_u64_2_4,
+	verify_bytes_2_4,
+	verify_u64_2_4,
 }
 
 verify_bytes :: verify_bytes_2_4
-verify_u64   :: verify_u64_2_4
+verify_u64 :: verify_u64_2_4
 verify :: proc {
-    verify_bytes,
-    verify_u64,
+	verify_bytes,
+	verify_u64,
 }
 
 // sum_string_4_8 will hash the given message with the key and return
 // the computed hash as a u64
 sum_string_4_8 :: proc(msg, key: string) -> u64 {
-    return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key))
+	return sum_bytes_4_8(transmute([]byte)(msg), transmute([]byte)(key))
 }
 
 // sum_bytes_4_8 will hash the given message with the key and return
 // the computed hash as a u64
-sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 {
-    ctx: Context
-    hash: u64
-    init(&ctx, key, 4, 8)
-    update(&ctx, msg)
-    final(&ctx, &hash)
-    return hash
+sum_bytes_4_8 :: proc(msg, key: []byte) -> u64 {
+	ctx: Context
+	hash: u64
+	init(&ctx, key, 4, 8)
+	update(&ctx, msg)
+	final(&ctx, &hash)
+	return hash
 }
 
 // sum_string_to_buffer_4_8 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_string_to_buffer_4_8 :: proc(msg, key: string, dst: []byte) {
-    sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst)
+	sum_bytes_to_buffer_4_8(transmute([]byte)(msg), transmute([]byte)(key), dst)
 }
 
 // sum_bytes_to_buffer_4_8 will hash the given message with the key and write
 // the computed hash into the provided destination buffer
 sum_bytes_to_buffer_4_8 :: proc(msg, key, dst: []byte) {
-    assert(len(dst) >= DIGEST_SIZE, "crypto/siphash: Destination buffer needs to be at least of size 8")
-    hash  := sum_bytes_4_8(msg, key)
-    _collect_output(dst[:], hash)
+	hash := sum_bytes_4_8(msg, key)
+	_collect_output(dst[:], hash)
 }
 
 sum_4_8 :: proc {
-    sum_string_4_8,
-    sum_bytes_4_8,
-    sum_string_to_buffer_4_8,
-    sum_bytes_to_buffer_4_8,
+	sum_string_4_8,
+	sum_bytes_4_8,
+	sum_string_to_buffer_4_8,
+	sum_bytes_to_buffer_4_8,
 }
 
-// verify_u64_4_8 will check if the supplied tag matches with the output you 
+// verify_u64_4_8 will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_u64_4_8 :: proc (tag: u64 msg, key: []byte) -> bool {
-    return sum_bytes_4_8(msg, key) == tag
+verify_u64_4_8 :: proc(tag: u64, msg, key: []byte) -> bool {
+	return sum_bytes_4_8(msg, key) == tag
 }
 
-// verify_bytes will check if the supplied tag matches with the output you 
+// verify_bytes will check if the supplied tag matches with the output you
 // will get from the provided message and key
-verify_bytes_4_8 :: proc (tag, msg, key: []byte) -> bool {
-    derived_tag: [8]byte
-    sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
-    return crypto.compare_constant_time(derived_tag[:], tag) == 1
+verify_bytes_4_8 :: proc(tag, msg, key: []byte) -> bool {
+	derived_tag: [8]byte
+	sum_bytes_to_buffer_4_8(msg, key, derived_tag[:])
+	return crypto.compare_constant_time(derived_tag[:], tag) == 1
 }
 
 verify_4_8 :: proc {
-    verify_bytes_4_8,
-    verify_u64_4_8,
+	verify_bytes_4_8,
+	verify_u64_4_8,
 }
 
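A usage sketch of the default SipHash-2-4 aliases above, assuming the package is imported as core:crypto/siphash and a 16-byte key:

import "core:crypto/siphash"

siphash_tag_and_verify :: proc(msg, key: []byte) -> bool {
	// sum_bytes is an alias for sum_bytes_2_4; key must be KEY_SIZE (16) bytes.
	tag := siphash.sum_bytes(msg, key)

	// verify_u64 recomputes the tag over msg and compares; verify_bytes
	// does the same against a serialized tag using a constant-time compare.
	return siphash.verify_u64(tag, msg, key)
}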
 /*
@@ -216,120 +214,150 @@ verify_4_8 :: proc {
 */
 
 init :: proc(ctx: ^Context, key: []byte, c_rounds, d_rounds: int) {
-    assert(len(key) == KEY_SIZE, "crypto/siphash: Invalid key size, want 16")
-    ctx.c_rounds = c_rounds
-    ctx.d_rounds = d_rounds
-    is_valid_setting := (ctx.c_rounds == 1 && ctx.d_rounds == 3) ||
-                        (ctx.c_rounds == 2 && ctx.d_rounds == 4) ||
-                        (ctx.c_rounds == 4 && ctx.d_rounds == 8)
-    assert(is_valid_setting, "crypto/siphash: Incorrect rounds set up. Valid pairs are (1,3), (2,4) and (4,8)")
-    ctx.k0 = util.U64_LE(key[:8])
-    ctx.k1 = util.U64_LE(key[8:])
-    ctx.v0 = 0x736f6d6570736575 ~ ctx.k0
-    ctx.v1 = 0x646f72616e646f6d ~ ctx.k1
-    ctx.v2 = 0x6c7967656e657261 ~ ctx.k0
-    ctx.v3 = 0x7465646279746573 ~ ctx.k1
-    ctx.is_initialized = true
+	if len(key) != KEY_SIZE {
+		panic("crypto/siphash: invalid key size")
+	}
+	ctx.c_rounds = c_rounds
+	ctx.d_rounds = d_rounds
+	is_valid_setting :=
+		(ctx.c_rounds == 1 && ctx.d_rounds == 3) ||
+		(ctx.c_rounds == 2 && ctx.d_rounds == 4) ||
+		(ctx.c_rounds == 4 && ctx.d_rounds == 8)
+	if !is_valid_setting {
+		panic("crypto/siphash: incorrect rounds set up")
+	}
+	ctx.k0 = endian.unchecked_get_u64le(key[:8])
+	ctx.k1 = endian.unchecked_get_u64le(key[8:])
+	ctx.v0 = 0x736f6d6570736575 ~ ctx.k0
+	ctx.v1 = 0x646f72616e646f6d ~ ctx.k1
+	ctx.v2 = 0x6c7967656e657261 ~ ctx.k0
+	ctx.v3 = 0x7465646279746573 ~ ctx.k1
+
+	ctx.last_block = 0
+	ctx.total_length = 0
+
+	ctx.is_initialized = true
 }
 
 update :: proc(ctx: ^Context, data: []byte) {
-    assert(ctx.is_initialized, "crypto/siphash: Context is not initialized")
-    ctx.last_block = len(data) / 8 * 8
-    ctx.buf = data
-    i := 0
-    m: u64
-    for i < ctx.last_block {
-        m = u64(ctx.buf[i] & 0xff)
-        i += 1
-
-        for r in u64(1)..<8 {
-            m |= u64(ctx.buf[i] & 0xff) << (r * 8)
-            i += 1
-        }
-
-        ctx.v3 ~= m
-        for _ in 0..<ctx.c_rounds {
-            _compress(ctx)
-        }
-
-        ctx.v0 ~= m
-    }
+	assert(ctx.is_initialized, "crypto/siphash: context is not initialized")
+
+	data := data
+	ctx.total_length += len(data)
+	if ctx.last_block > 0 {
+		n := copy(ctx.buf[ctx.last_block:], data)
+		ctx.last_block += n
+		if ctx.last_block == BLOCK_SIZE {
+			block(ctx, ctx.buf[:])
+			ctx.last_block = 0
+		}
+		data = data[n:]
+	}
+	if len(data) >= BLOCK_SIZE {
+		n := len(data) &~ (BLOCK_SIZE - 1)
+		block(ctx, data[:n])
+		data = data[n:]
+	}
+	if len(data) > 0 {
+		ctx.last_block = copy(ctx.buf[:], data)
+	}
 }
 
 final :: proc(ctx: ^Context, dst: ^u64) {
-    m: u64
-    for i := len(ctx.buf) - 1; i >= ctx.last_block; i -= 1 {
-        m <<= 8
-        m |= u64(ctx.buf[i] & 0xff)
-    }
-    m |= u64(len(ctx.buf) << 56)
+	assert(ctx.is_initialized, "crypto/siphash: context is not initialized")
 
-    ctx.v3 ~= m
+	tmp: [BLOCK_SIZE]byte
+	copy(tmp[:], ctx.buf[:ctx.last_block])
+	tmp[7] = byte(ctx.total_length & 0xff)
+	block(ctx, tmp[:])
 
-    for _ in 0..<ctx.c_rounds {
-        _compress(ctx)
-    }
+	ctx.v2 ~= 0xff
 
-    ctx.v0 ~= m
-    ctx.v2 ~= 0xff
+	for _ in 0 ..< ctx.d_rounds {
+		_compress(ctx)
+	}
 
-    for _ in 0..<ctx.d_rounds {
-        _compress(ctx)
-    }
+	dst^ = ctx.v0 ~ ctx.v1 ~ ctx.v2 ~ ctx.v3
 
-    dst^ = ctx.v0 ~ ctx.v1 ~ ctx.v2 ~ ctx.v3
-
-    reset(ctx)
+	reset(ctx)
 }
 
 reset :: proc(ctx: ^Context) {
-    ctx.k0, ctx.k1 = 0, 0
-    ctx.v0, ctx.v1 = 0, 0
-    ctx.v2, ctx.v3 = 0, 0
-    ctx.last_block = 0
-    ctx.c_rounds = 0
-    ctx.d_rounds = 0
-    ctx.is_initialized = false
+	ctx.k0, ctx.k1 = 0, 0
+	ctx.v0, ctx.v1 = 0, 0
+	ctx.v2, ctx.v3 = 0, 0
+	ctx.last_block = 0
+	ctx.total_length = 0
+	ctx.c_rounds = 0
+	ctx.d_rounds = 0
+	ctx.is_initialized = false
 }
 
+BLOCK_SIZE :: 8
+
 Context :: struct {
-    v0, v1, v2, v3: u64,    // State values
-    k0, k1:         u64,    // Split key
-    c_rounds:       int,    // Number of message rounds
-    d_rounds:       int,    // Number of finalization rounds
-    buf:            []byte, // Provided data
-    last_block:     int,    // Offset from the last block
-    is_initialized: bool,
+	v0, v1, v2, v3: u64, // State values
+	k0, k1:         u64, // Split key
+	c_rounds:       int, // Number of message rounds
+	d_rounds:       int, // Number of finalization rounds
+	buf:            [BLOCK_SIZE]byte, // Provided data
+	last_block:     int, // Offset from the last block
+	total_length:   int,
+	is_initialized: bool,
+}
+
+@(private)
+block :: proc "contextless" (ctx: ^Context, buf: []byte) {
+	buf := buf
+
+	for len(buf) >= BLOCK_SIZE {
+		m := endian.unchecked_get_u64le(buf)
+
+		ctx.v3 ~= m
+		for _ in 0 ..< ctx.c_rounds {
+			_compress(ctx)
+		}
+
+		ctx.v0 ~= m
+
+		buf = buf[BLOCK_SIZE:]
+	}
 }
 
+@(private)
 _get_byte :: #force_inline proc "contextless" (byte_num: byte, into: u64) -> byte {
-    return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3))
+	return byte(into >> (((~byte_num) & (size_of(u64) - 1)) << 3))
 }
 
-_collect_output :: #force_inline proc "contextless" (dst: []byte, hash: u64) {
-    dst[0] = _get_byte(7, hash)
-    dst[1] = _get_byte(6, hash)
-    dst[2] = _get_byte(5, hash)
-    dst[3] = _get_byte(4, hash)
-    dst[4] = _get_byte(3, hash)
-    dst[5] = _get_byte(2, hash)
-    dst[6] = _get_byte(1, hash)
-    dst[7] = _get_byte(0, hash)
+@(private)
+_collect_output :: #force_inline proc(dst: []byte, hash: u64) {
+	if len(dst) < DIGEST_SIZE {
+		panic("crypto/siphash: invalid tag size")
+	}
+	dst[0] = _get_byte(7, hash)
+	dst[1] = _get_byte(6, hash)
+	dst[2] = _get_byte(5, hash)
+	dst[3] = _get_byte(4, hash)
+	dst[4] = _get_byte(3, hash)
+	dst[5] = _get_byte(2, hash)
+	dst[6] = _get_byte(1, hash)
+	dst[7] = _get_byte(0, hash)
 }
 
+@(private)
 _compress :: #force_inline proc "contextless" (ctx: ^Context) {
-    ctx.v0 += ctx.v1
-    ctx.v1  = util.ROTL64(ctx.v1, 13)
-    ctx.v1 ~= ctx.v0
-    ctx.v0  = util.ROTL64(ctx.v0, 32)
-    ctx.v2 += ctx.v3
-    ctx.v3  = util.ROTL64(ctx.v3, 16)
-    ctx.v3 ~= ctx.v2
-    ctx.v0 += ctx.v3
-    ctx.v3  = util.ROTL64(ctx.v3, 21)
-    ctx.v3 ~= ctx.v0
-    ctx.v2 += ctx.v1
-    ctx.v1  = util.ROTL64(ctx.v1, 17)
-    ctx.v1 ~= ctx.v2
-    ctx.v2  = util.ROTL64(ctx.v2, 32)
+	ctx.v0 += ctx.v1
+	ctx.v1 = bits.rotate_left64(ctx.v1, 13)
+	ctx.v1 ~= ctx.v0
+	ctx.v0 = bits.rotate_left64(ctx.v0, 32)
+	ctx.v2 += ctx.v3
+	ctx.v3 = bits.rotate_left64(ctx.v3, 16)
+	ctx.v3 ~= ctx.v2
+	ctx.v0 += ctx.v3
+	ctx.v3 = bits.rotate_left64(ctx.v3, 21)
+	ctx.v3 ~= ctx.v0
+	ctx.v2 += ctx.v1
+	ctx.v1 = bits.rotate_left64(ctx.v1, 17)
+	ctx.v1 ~= ctx.v2
+	ctx.v2 = bits.rotate_left64(ctx.v2, 32)
 }
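A streaming sketch against the low-level API, assuming the same import path:

import "core:crypto/siphash"

siphash_streaming :: proc(parts: [][]byte, key: []byte) -> u64 {
	// init panics unless len(key) == KEY_SIZE and (c_rounds, d_rounds)
	// is one of (1, 3), (2, 4) or (4, 8).
	ctx: siphash.Context
	siphash.init(&ctx, key, 2, 4)
	for part in parts {
		// update now buffers partial blocks internally, so arbitrary
		// chunk sizes are fine.
		siphash.update(&ctx, part)
	}
	// final writes the 64-bit tag and resets the context.
	tag: u64
	siphash.final(&ctx, &tag)
	return tag
}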

+ 204 - 175
core/crypto/sm3/sm3.odin

@@ -10,10 +10,10 @@ package sm3
     Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02>
 */
 
-import "core:os"
+import "core:encoding/endian"
 import "core:io"
-
-import "../util"
+import "core:math/bits"
+import "core:os"
 
 /*
     High level API
@@ -24,227 +24,256 @@ DIGEST_SIZE :: 32
 // hash_string will hash the given input and return the
 // computed hash
 hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
+	return hash_bytes(transmute([]byte)(data))
 }
 
 // hash_bytes will hash the given input and return the
 // computed hash
 hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: Sm3_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
+	return hash
 }
 
 // hash_string_to_buffer will hash the given input and assign the
 // computed hash to the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
+	hash_bytes_to_buffer(transmute([]byte)(data), hash)
 }
 
 // hash_bytes_to_buffer will hash the given input and write the
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Sm3_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash)
+	ctx: Context
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: Sm3_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true 
+	hash: [DIGEST_SIZE]byte
+	ctx: Context
+	init(&ctx)
+
+	buf := make([]byte, 512)
+	defer delete(buf)
+
+	read := 1
+	for read > 0 {
+		read, _ = io.read(s, buf)
+		if read > 0 {
+			update(&ctx, buf[:read])
+		}
+	}
+	final(&ctx, hash[:])
+	return hash, true
 }
 
 // hash_file will read the file provided by the given handle
 // and compute a hash
 hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
+	if !load_at_once {
+		return hash_stream(os.stream_from_handle(hd))
+	} else {
+		if buf, ok := os.read_entire_file(hd); ok {
+			return hash_bytes(buf[:]), ok
+		}
+	}
+	return [DIGEST_SIZE]byte{}, false
 }
 
 hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
 }
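A usage sketch, assuming the package is imported as core:crypto/sm3 (proc name illustrative):

import "core:crypto/sm3"
import "core:fmt"

example_sm3 :: proc() {
	// One-shot hashing; "abc" is the standard test vector from the draft,
	// expected digest 66c7f0f4...8f4ba8e0.
	digest := sm3.hash_string("abc")
	for b in digest {
		fmt.printf("%02x", b)
	}
	fmt.println()
}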
 
 /*
     Low level API
 */
 
-init :: proc(ctx: ^Sm3_Context) {
-    ctx.state[0] = IV[0]
-    ctx.state[1] = IV[1]
-    ctx.state[2] = IV[2]
-    ctx.state[3] = IV[3]
-    ctx.state[4] = IV[4]
-    ctx.state[5] = IV[5]
-    ctx.state[6] = IV[6]
-    ctx.state[7] = IV[7]
+init :: proc(ctx: ^Context) {
+	ctx.state[0] = IV[0]
+	ctx.state[1] = IV[1]
+	ctx.state[2] = IV[2]
+	ctx.state[3] = IV[3]
+	ctx.state[4] = IV[4]
+	ctx.state[5] = IV[5]
+	ctx.state[6] = IV[6]
+	ctx.state[7] = IV[7]
+
+	ctx.length = 0
+	ctx.bitlength = 0
+
+	ctx.is_initialized = true
 }
 
-update :: proc(ctx: ^Sm3_Context, data: []byte) {
-    data := data
-    ctx.length += u64(len(data))
-
-    if ctx.bitlength > 0 {
-        n := copy(ctx.x[ctx.bitlength:], data[:])
-        ctx.bitlength += u64(n)
-        if ctx.bitlength == 64 {
-            block(ctx, ctx.x[:])
-            ctx.bitlength = 0
-        }
-        data = data[n:]
-    }
-    if len(data) >= 64 {
-        n := len(data) &~ (64 - 1)
-        block(ctx, data[:n])
-        data = data[n:]
-    }
-    if len(data) > 0 {
-        ctx.bitlength = u64(copy(ctx.x[:], data[:]))
-    }
+update :: proc(ctx: ^Context, data: []byte) {
+	assert(ctx.is_initialized)
+
+	data := data
+	ctx.length += u64(len(data))
+
+	if ctx.bitlength > 0 {
+		n := copy(ctx.x[ctx.bitlength:], data[:])
+		ctx.bitlength += u64(n)
+		if ctx.bitlength == BLOCK_SIZE {
+			block(ctx, ctx.x[:])
+			ctx.bitlength = 0
+		}
+		data = data[n:]
+	}
+	if len(data) >= BLOCK_SIZE {
+		n := len(data) &~ (BLOCK_SIZE - 1)
+		block(ctx, data[:n])
+		data = data[n:]
+	}
+	if len(data) > 0 {
+		ctx.bitlength = u64(copy(ctx.x[:], data[:]))
+	}
 }
 
-final :: proc(ctx: ^Sm3_Context, hash: []byte) {
-    length := ctx.length
-
-    pad: [64]byte
-    pad[0] = 0x80
-    if length % 64 < 56 {
-        update(ctx, pad[0: 56 - length % 64])
-    } else {
-        update(ctx, pad[0: 64 + 56 - length % 64])
-    }
-
-    length <<= 3
-    util.PUT_U64_BE(pad[:], length)
-    update(ctx, pad[0: 8])
-    assert(ctx.bitlength == 0)
-
-    util.PUT_U32_BE(hash[0:],  ctx.state[0])
-    util.PUT_U32_BE(hash[4:],  ctx.state[1])
-    util.PUT_U32_BE(hash[8:],  ctx.state[2])
-    util.PUT_U32_BE(hash[12:], ctx.state[3])
-    util.PUT_U32_BE(hash[16:], ctx.state[4])
-    util.PUT_U32_BE(hash[20:], ctx.state[5])
-    util.PUT_U32_BE(hash[24:], ctx.state[6])
-    util.PUT_U32_BE(hash[28:], ctx.state[7])
+final :: proc(ctx: ^Context, hash: []byte) {
+	assert(ctx.is_initialized)
+
+	if len(hash) < DIGEST_SIZE {
+		panic("crypto/sm3: invalid destination digest size")
+	}
+
+	length := ctx.length
+
+	pad: [BLOCK_SIZE]byte
+	pad[0] = 0x80
+	if length % BLOCK_SIZE < 56 {
+		update(ctx, pad[0:56 - length % BLOCK_SIZE])
+	} else {
+		update(ctx, pad[0:BLOCK_SIZE + 56 - length % BLOCK_SIZE])
+	}
+
+	length <<= 3
+	endian.unchecked_put_u64be(pad[:], length)
+	update(ctx, pad[0:8])
+	assert(ctx.bitlength == 0)
+
+	for i := 0; i < DIGEST_SIZE / 4; i += 1 {
+		endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
+	}
+
+	ctx.is_initialized = false
 }
 
 /*
     SM3 implementation
 */
 
-Sm3_Context :: struct {
-    state:     [8]u32,
-    x:         [64]byte,
-    bitlength: u64,
-    length:    u64,
+BLOCK_SIZE :: 64
+
+Context :: struct {
+	state:     [8]u32,
+	x:         [BLOCK_SIZE]byte,
+	bitlength: u64,
+	length:    u64,
+
+	is_initialized: bool,
 }
 
+@(private)
 IV := [8]u32 {
-    0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,
-    0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e,
+	0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,
+	0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e,
 }
 
-block :: proc "contextless" (ctx: ^Sm3_Context, buf: []byte) {
-    buf := buf
-
-    w:  [68]u32
-    wp: [64]u32
-
-    state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3]
-    state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7]
-
-    for len(buf) >= 64 {
-        for i := 0; i < 16; i += 1 {
-            j := i * 4
-            w[i] = u32(buf[j]) << 24 | u32(buf[j + 1]) << 16 | u32(buf[j + 2]) << 8 | u32(buf[j + 3])
-        }
-        for i := 16; i < 68; i += 1 {
-            p1v := w[i - 16] ~ w[i - 9] ~ util.ROTL32(w[i - 3], 15)
-            // @note(zh): inlined P1
-            w[i] = p1v ~ util.ROTL32(p1v, 15) ~ util.ROTL32(p1v, 23) ~ util.ROTL32(w[i - 13], 7) ~ w[i - 6]
-        }
-        for i := 0; i < 64; i += 1 {
-            wp[i] = w[i] ~ w[i + 4]
-        }
-
-        a, b, c, d := state0, state1, state2, state3
-        e, f, g, h := state4, state5, state6, state7
-
-        for i := 0; i < 16; i += 1 {
-            v1  := util.ROTL32(u32(a), 12)
-            ss1 := util.ROTL32(v1 + u32(e) + util.ROTL32(0x79cc4519, i), 7)
-            ss2 := ss1 ~ v1
-
-            // @note(zh): inlined FF1
-            tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i]
-            // @note(zh): inlined GG1
-            tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i]
-
-            a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c
-            // @note(zh): inlined P0
-            e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g
-        }
-
-        for i := 16; i < 64; i += 1 {
-            v   := util.ROTL32(u32(a), 12)
-            ss1 := util.ROTL32(v + u32(e) + util.ROTL32(0x7a879d8a, i % 32), 7)
-            ss2 := ss1 ~ v
-
-            // @note(zh): inlined FF2
-            tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i]
-            // @note(zh): inlined GG2
-            tt2 := u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i]
-
-            a, b, c, d = tt1, a, util.ROTL32(u32(b), 9), c
-            // @note(zh): inlined P0
-            e, f, g, h = (tt2 ~ util.ROTL32(tt2, 9) ~ util.ROTL32(tt2, 17)), e, util.ROTL32(u32(f), 19), g
-        }
-
-        state0 ~= a
-        state1 ~= b
-        state2 ~= c
-        state3 ~= d
-        state4 ~= e
-        state5 ~= f
-        state6 ~= g
-        state7 ~= h
-
-        buf = buf[64:]
-    }
-
-    ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3
-    ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7
+@(private)
+block :: proc "contextless" (ctx: ^Context, buf: []byte) {
+	buf := buf
+
+	w: [68]u32
+	wp: [64]u32
+
+	state0, state1, state2, state3 := ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3]
+	state4, state5, state6, state7 := ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7]
+
+	for len(buf) >= BLOCK_SIZE {
+		for i := 0; i < 16; i += 1 {
+			w[i] = endian.unchecked_get_u32be(buf[i * 4:])
+		}
+		for i := 16; i < 68; i += 1 {
+			p1v := w[i - 16] ~ w[i - 9] ~ bits.rotate_left32(w[i - 3], 15)
+			// @note(zh): inlined P1
+			w[i] =
+				p1v ~
+				bits.rotate_left32(p1v, 15) ~
+				bits.rotate_left32(p1v, 23) ~
+				bits.rotate_left32(w[i - 13], 7) ~
+				w[i - 6]
+		}
+		for i := 0; i < 64; i += 1 {
+			wp[i] = w[i] ~ w[i + 4]
+		}
+
+		a, b, c, d := state0, state1, state2, state3
+		e, f, g, h := state4, state5, state6, state7
+
+		for i := 0; i < 16; i += 1 {
+			v1 := bits.rotate_left32(u32(a), 12)
+			ss1 := bits.rotate_left32(v1 + u32(e) + bits.rotate_left32(0x79cc4519, i), 7)
+			ss2 := ss1 ~ v1
+
+			// @note(zh): inlined FF1
+			tt1 := u32(a ~ b ~ c) + u32(d) + ss2 + wp[i]
+			// @note(zh): inlined GG1
+			tt2 := u32(e ~ f ~ g) + u32(h) + ss1 + w[i]
+
+			a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c
+			// @note(zh): inlined P0
+			e, f, g, h =
+				(tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)),
+				e,
+				bits.rotate_left32(u32(f), 19),
+				g
+		}
+
+		for i := 16; i < 64; i += 1 {
+			v := bits.rotate_left32(u32(a), 12)
+			ss1 := bits.rotate_left32(v + u32(e) + bits.rotate_left32(0x7a879d8a, i % 32), 7)
+			ss2 := ss1 ~ v
+
+			// @note(zh): inlined FF2
+			tt1 := u32(((a & b) | (a & c) | (b & c)) + d) + ss2 + wp[i]
+			// @note(zh): inlined GG2
+			tt2 := u32(((e & f) | ((~e) & g)) + h) + ss1 + w[i]
+
+			a, b, c, d = tt1, a, bits.rotate_left32(u32(b), 9), c
+			// @note(zh): inlined P0
+			e, f, g, h =
+				(tt2 ~ bits.rotate_left32(tt2, 9) ~ bits.rotate_left32(tt2, 17)),
+				e,
+				bits.rotate_left32(u32(f), 19),
+				g
+		}
+
+		state0 ~= a
+		state1 ~= b
+		state2 ~= c
+		state3 ~= d
+		state4 ~= e
+		state5 ~= f
+		state6 ~= g
+		state7 ~= h
+
+		buf = buf[BLOCK_SIZE:]
+	}
+
+	ctx.state[0], ctx.state[1], ctx.state[2], ctx.state[3] = state0, state1, state2, state3
+	ctx.state[4], ctx.state[5], ctx.state[6], ctx.state[7] = state4, state5, state6, state7
 }
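
The refactor above renames Sm3_Context to Context, tracks initialization explicitly, and swaps the hand-rolled byte loads and util.ROTL32 calls for core:encoding/endian and core:math/bits. A minimal driver sketch, assuming the package keeps the init/update/final trio that the diff calls incrementally (only Context and DIGEST_SIZE are confirmed above):

package sm3_demo

import "core:crypto/sm3"
import "core:fmt"

main :: proc() {
	ctx: sm3.Context
	sm3.init(&ctx) // assumed entry point; sets the IV and is_initialized
	msg := "abc"
	sm3.update(&ctx, transmute([]byte)msg)

	digest: [sm3.DIGEST_SIZE]byte
	sm3.final(&ctx, digest[:]) // pads to 56 mod 64, appends the bit length, clears the context
	fmt.printf("%x\n", digest)
}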

+ 0 - 517
core/crypto/streebog/streebog.odin

@@ -1,517 +0,0 @@
-package streebog
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the Streebog hashing algorithm, standardized as GOST R 34.11-2012 in RFC 6986 <https://datatracker.ietf.org/doc/html/rfc6986>
-*/
-
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_512 :: 64
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Streebog_Context
-    ctx.is256 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: Streebog_Context
-    ctx.is256 = true
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: Streebog_Context
-    ctx.is256 = true
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Streebog_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: Streebog_Context
-    init(&ctx)
-    update(&ctx, data)
-    final(&ctx, hash[:])
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: Streebog_Context
-    init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            update(&ctx, buf[:read])
-        } 
-    }
-    final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^Streebog_Context) {
-	if ctx.is256 {
-		ctx.hash_size = 256
-		for _, i in ctx.h {
-			ctx.h[i] = 0x01
-		}
-	} else {
-		ctx.hash_size = 512
-	}
-	ctx.v_512[1] = 0x02
-}
-
-update :: proc(ctx: ^Streebog_Context, data: []byte) {
-	length := u64(len(data))
-	chk_size: u64
-	data := data
-	for (length > 63) && (ctx.buf_size == 0) {
-		stage2(ctx, data)
-		data = data[64:]
-		length -= 64
-	}
-
-	for length != 0 {
-		chk_size = 64 - ctx.buf_size
-		if chk_size > length {
-			chk_size = length
-		}
-		copy(ctx.buffer[ctx.buf_size:], data[:chk_size])
-		ctx.buf_size += chk_size
-		length -= chk_size
-		data = data[chk_size:]
-		if ctx.buf_size == 64 {
-			stage2(ctx, ctx.buffer[:])
-			ctx.buf_size = 0
-		}
-	}
-}
-
-final :: proc(ctx: ^Streebog_Context, hash: []byte) {
-	t: [64]byte
-	t[1] = byte((ctx.buf_size * 8) >> 8) & 0xff
-	t[0] = byte((ctx.buf_size) * 8) & 0xff
-
-	padding(ctx)
-
-	G(ctx.h[:], ctx.n[:], ctx.buffer[:])
-
-	add_mod_512(ctx.n[:], t[:], ctx.n[:])
-	add_mod_512(ctx.sigma[:], ctx.buffer[:], ctx.sigma[:])
-
-	G(ctx.h[:], ctx.v_0[:], ctx.n[:])
-	G(ctx.h[:], ctx.v_0[:], ctx.sigma[:])
-
-	if ctx.is256 {
-		copy(hash[:], ctx.h[32:])
-	} else {
-		copy(hash[:], ctx.h[:])
-	}
-}
-
-/*
-    Streebog implementation
-*/
-
-PI := [256]byte {
-	252, 238, 221, 17,  207, 110, 49,  22,  251, 196, 250, 218, 35,  197, 4,   77,
-	233, 119, 240, 219, 147, 46,  153, 186, 23,  54,  241, 187, 20,  205, 95,  193,
-	249, 24,  101, 90,  226, 92,  239, 33,  129, 28,  60,  66,  139, 1,   142, 79,
-	5,   132, 2,   174, 227, 106, 143, 160, 6,   11,  237, 152, 127, 212, 211, 31,
-	235, 52,  44,  81,  234, 200, 72,  171, 242, 42,  104, 162, 253, 58,  206, 204,
-	181, 112, 14,  86,  8,   12,  118, 18,  191, 114, 19,  71,  156, 183, 93,  135,
-	21,  161, 150, 41,  16,  123, 154, 199, 243, 145, 120, 111, 157, 158, 178, 177,
-	50,  117, 25,  61,  255, 53,  138, 126, 109, 84,  198, 128, 195, 189, 13,  87,
-	223, 245, 36,  169, 62,  168, 67,  201, 215, 121, 214, 246, 124, 34,  185, 3,
-	224, 15,  236, 222, 122, 148, 176, 188, 220, 232, 40,  80,  78,  51,  10,  74,
-	167, 151, 96,  115, 30,  0,   98,  68,  26,  184, 56,  130, 100, 159, 38,  65,
-	173, 69,  70,  146, 39,  94,  85,  47,  140, 163, 165, 125, 105, 213, 149, 59,
-	7,   88,  179, 64,  134, 172, 29,  247, 48,  55,  107, 228, 136, 217, 231, 137,
-	225, 27,  131, 73,  76,  63,  248, 254, 141, 83,  170, 144, 202, 216, 133, 97,
-	32,  113, 103, 164, 45,  43,  9,   91,  203, 155, 37,  208, 190, 229, 108, 82,
-	89,  166, 116, 210, 230, 244, 180, 192, 209, 102, 175, 194, 57,  75,  99,  182,
-}
-
-TAU := [64]byte {
-	0,  8, 16, 24, 32, 40, 48, 56,
-	1,  9, 17, 25, 33, 41, 49, 57,
-	2, 10, 18, 26, 34, 42, 50, 58,
-	3, 11, 19, 27, 35, 43, 51, 59,
-	4, 12, 20, 28, 36, 44, 52, 60,
-	5, 13, 21, 29, 37, 45, 53, 61,
-	6, 14, 22, 30, 38, 46, 54, 62,
-	7, 15, 23, 31, 39, 47, 55, 63,
-}
-
-STREEBOG_A := [64]u64 {
-	0x8e20faa72ba0b470, 0x47107ddd9b505a38, 0xad08b0e0c3282d1c, 0xd8045870ef14980e,
-	0x6c022c38f90a4c07, 0x3601161cf205268d, 0x1b8e0b0e798c13c8, 0x83478b07b2468764,
-	0xa011d380818e8f40, 0x5086e740ce47c920, 0x2843fd2067adea10, 0x14aff010bdd87508,
-	0x0ad97808d06cb404, 0x05e23c0468365a02, 0x8c711e02341b2d01, 0x46b60f011a83988e,
-	0x90dab52a387ae76f, 0x486dd4151c3dfdb9, 0x24b86a840e90f0d2, 0x125c354207487869,
-	0x092e94218d243cba, 0x8a174a9ec8121e5d, 0x4585254f64090fa0, 0xaccc9ca9328a8950,
-	0x9d4df05d5f661451, 0xc0a878a0a1330aa6, 0x60543c50de970553, 0x302a1e286fc58ca7,
-	0x18150f14b9ec46dd, 0x0c84890ad27623e0, 0x0642ca05693b9f70, 0x0321658cba93c138,
-	0x86275df09ce8aaa8, 0x439da0784e745554, 0xafc0503c273aa42a, 0xd960281e9d1d5215,
-	0xe230140fc0802984, 0x71180a8960409a42, 0xb60c05ca30204d21, 0x5b068c651810a89e,
-	0x456c34887a3805b9, 0xac361a443d1c8cd2, 0x561b0d22900e4669, 0x2b838811480723ba,
-	0x9bcf4486248d9f5d, 0xc3e9224312c8c1a0, 0xeffa11af0964ee50, 0xf97d86d98a327728,
-	0xe4fa2054a80b329c, 0x727d102a548b194e, 0x39b008152acb8227, 0x9258048415eb419d,
-	0x492c024284fbaec0, 0xaa16012142f35760, 0x550b8e9e21f7a530, 0xa48b474f9ef5dc18,
-	0x70a6a56e2440598e, 0x3853dc371220a247, 0x1ca76e95091051ad, 0x0edd37c48a08a6d8,
-	0x07e095624504536c, 0x8d70c431ac02a736, 0xc83862965601dd1b, 0x641c314b2b8ee083,
-}
-
-STREEBOG_C := [12][64]byte { 
-	{
-		0x07, 0x45, 0xa6, 0xf2, 0x59, 0x65, 0x80, 0xdd,
-		0x23, 0x4d, 0x74, 0xcc, 0x36, 0x74, 0x76, 0x05,
-		0x15, 0xd3, 0x60, 0xa4, 0x08, 0x2a, 0x42, 0xa2,
-		0x01, 0x69, 0x67, 0x92, 0x91, 0xe0, 0x7c, 0x4b,
-		0xfc, 0xc4, 0x85, 0x75, 0x8d, 0xb8, 0x4e, 0x71,
-		0x16, 0xd0, 0x45, 0x2e, 0x43, 0x76, 0x6a, 0x2f,
-		0x1f, 0x7c, 0x65, 0xc0, 0x81, 0x2f, 0xcb, 0xeb,
-		0xe9, 0xda, 0xca, 0x1e, 0xda, 0x5b, 0x08, 0xb1,
-	},
-	{
-		0xb7, 0x9b, 0xb1, 0x21, 0x70, 0x04, 0x79, 0xe6,
-		0x56, 0xcd, 0xcb, 0xd7, 0x1b, 0xa2, 0xdd, 0x55,
-		0xca, 0xa7, 0x0a, 0xdb, 0xc2, 0x61, 0xb5, 0x5c,
-		0x58, 0x99, 0xd6, 0x12, 0x6b, 0x17, 0xb5, 0x9a,
-		0x31, 0x01, 0xb5, 0x16, 0x0f, 0x5e, 0xd5, 0x61,
-		0x98, 0x2b, 0x23, 0x0a, 0x72, 0xea, 0xfe, 0xf3,
-		0xd7, 0xb5, 0x70, 0x0f, 0x46, 0x9d, 0xe3, 0x4f,
-		0x1a, 0x2f, 0x9d, 0xa9, 0x8a, 0xb5, 0xa3, 0x6f,
-	},
-	{
-		0xb2, 0x0a, 0xba, 0x0a, 0xf5, 0x96, 0x1e, 0x99,
-		0x31, 0xdb, 0x7a, 0x86, 0x43, 0xf4, 0xb6, 0xc2,
-		0x09, 0xdb, 0x62, 0x60, 0x37, 0x3a, 0xc9, 0xc1,
-		0xb1, 0x9e, 0x35, 0x90, 0xe4, 0x0f, 0xe2, 0xd3,
-		0x7b, 0x7b, 0x29, 0xb1, 0x14, 0x75, 0xea, 0xf2,
-		0x8b, 0x1f, 0x9c, 0x52, 0x5f, 0x5e, 0xf1, 0x06,
-		0x35, 0x84, 0x3d, 0x6a, 0x28, 0xfc, 0x39, 0x0a,
-		0xc7, 0x2f, 0xce, 0x2b, 0xac, 0xdc, 0x74, 0xf5,
-	},
-	{
-		0x2e, 0xd1, 0xe3, 0x84, 0xbc, 0xbe, 0x0c, 0x22,
-		0xf1, 0x37, 0xe8, 0x93, 0xa1, 0xea, 0x53, 0x34,
-		0xbe, 0x03, 0x52, 0x93, 0x33, 0x13, 0xb7, 0xd8,
-		0x75, 0xd6, 0x03, 0xed, 0x82, 0x2c, 0xd7, 0xa9,
-		0x3f, 0x35, 0x5e, 0x68, 0xad, 0x1c, 0x72, 0x9d,
-		0x7d, 0x3c, 0x5c, 0x33, 0x7e, 0x85, 0x8e, 0x48,
-		0xdd, 0xe4, 0x71, 0x5d, 0xa0, 0xe1, 0x48, 0xf9,
-		0xd2, 0x66, 0x15, 0xe8, 0xb3, 0xdf, 0x1f, 0xef,
-	},
-	{
-		0x57, 0xfe, 0x6c, 0x7c, 0xfd, 0x58, 0x17, 0x60,
-		0xf5, 0x63, 0xea, 0xa9, 0x7e, 0xa2, 0x56, 0x7a,
-		0x16, 0x1a, 0x27, 0x23, 0xb7, 0x00, 0xff, 0xdf,
-		0xa3, 0xf5, 0x3a, 0x25, 0x47, 0x17, 0xcd, 0xbf,
-		0xbd, 0xff, 0x0f, 0x80, 0xd7, 0x35, 0x9e, 0x35,
-		0x4a, 0x10, 0x86, 0x16, 0x1f, 0x1c, 0x15, 0x7f,
-		0x63, 0x23, 0xa9, 0x6c, 0x0c, 0x41, 0x3f, 0x9a,
-		0x99, 0x47, 0x47, 0xad, 0xac, 0x6b, 0xea, 0x4b,
-	},
-	{
-		0x6e, 0x7d, 0x64, 0x46, 0x7a, 0x40, 0x68, 0xfa,
-		0x35, 0x4f, 0x90, 0x36, 0x72, 0xc5, 0x71, 0xbf,
-		0xb6, 0xc6, 0xbe, 0xc2, 0x66, 0x1f, 0xf2, 0x0a,
-		0xb4, 0xb7, 0x9a, 0x1c, 0xb7, 0xa6, 0xfa, 0xcf,
-		0xc6, 0x8e, 0xf0, 0x9a, 0xb4, 0x9a, 0x7f, 0x18,
-		0x6c, 0xa4, 0x42, 0x51, 0xf9, 0xc4, 0x66, 0x2d,
-		0xc0, 0x39, 0x30, 0x7a, 0x3b, 0xc3, 0xa4, 0x6f,
-		0xd9, 0xd3, 0x3a, 0x1d, 0xae, 0xae, 0x4f, 0xae,
-	},
-	{
-		0x93, 0xd4, 0x14, 0x3a, 0x4d, 0x56, 0x86, 0x88,
-		0xf3, 0x4a, 0x3c, 0xa2, 0x4c, 0x45, 0x17, 0x35,
-		0x04, 0x05, 0x4a, 0x28, 0x83, 0x69, 0x47, 0x06,
-		0x37, 0x2c, 0x82, 0x2d, 0xc5, 0xab, 0x92, 0x09,
-		0xc9, 0x93, 0x7a, 0x19, 0x33, 0x3e, 0x47, 0xd3,
-		0xc9, 0x87, 0xbf, 0xe6, 0xc7, 0xc6, 0x9e, 0x39,
-		0x54, 0x09, 0x24, 0xbf, 0xfe, 0x86, 0xac, 0x51,
-		0xec, 0xc5, 0xaa, 0xee, 0x16, 0x0e, 0xc7, 0xf4,
-	},
-	{
-		0x1e, 0xe7, 0x02, 0xbf, 0xd4, 0x0d, 0x7f, 0xa4,
-		0xd9, 0xa8, 0x51, 0x59, 0x35, 0xc2, 0xac, 0x36,
-		0x2f, 0xc4, 0xa5, 0xd1, 0x2b, 0x8d, 0xd1, 0x69,
-		0x90, 0x06, 0x9b, 0x92, 0xcb, 0x2b, 0x89, 0xf4,
-		0x9a, 0xc4, 0xdb, 0x4d, 0x3b, 0x44, 0xb4, 0x89,
-		0x1e, 0xde, 0x36, 0x9c, 0x71, 0xf8, 0xb7, 0x4e,
-		0x41, 0x41, 0x6e, 0x0c, 0x02, 0xaa, 0xe7, 0x03,
-		0xa7, 0xc9, 0x93, 0x4d, 0x42, 0x5b, 0x1f, 0x9b,
-	},
-	{
-		0xdb, 0x5a, 0x23, 0x83, 0x51, 0x44, 0x61, 0x72,
-		0x60, 0x2a, 0x1f, 0xcb, 0x92, 0xdc, 0x38, 0x0e,
-		0x54, 0x9c, 0x07, 0xa6, 0x9a, 0x8a, 0x2b, 0x7b,
-		0xb1, 0xce, 0xb2, 0xdb, 0x0b, 0x44, 0x0a, 0x80,
-		0x84, 0x09, 0x0d, 0xe0, 0xb7, 0x55, 0xd9, 0x3c,
-		0x24, 0x42, 0x89, 0x25, 0x1b, 0x3a, 0x7d, 0x3a,
-		0xde, 0x5f, 0x16, 0xec, 0xd8, 0x9a, 0x4c, 0x94,
-		0x9b, 0x22, 0x31, 0x16, 0x54, 0x5a, 0x8f, 0x37,
-	},
-	{
-		0xed, 0x9c, 0x45, 0x98, 0xfb, 0xc7, 0xb4, 0x74,
-		0xc3, 0xb6, 0x3b, 0x15, 0xd1, 0xfa, 0x98, 0x36,
-		0xf4, 0x52, 0x76, 0x3b, 0x30, 0x6c, 0x1e, 0x7a,
-		0x4b, 0x33, 0x69, 0xaf, 0x02, 0x67, 0xe7, 0x9f,
-		0x03, 0x61, 0x33, 0x1b, 0x8a, 0xe1, 0xff, 0x1f,
-		0xdb, 0x78, 0x8a, 0xff, 0x1c, 0xe7, 0x41, 0x89,
-		0xf3, 0xf3, 0xe4, 0xb2, 0x48, 0xe5, 0x2a, 0x38,
-		0x52, 0x6f, 0x05, 0x80, 0xa6, 0xde, 0xbe, 0xab,
-	},
-	{
-		0x1b, 0x2d, 0xf3, 0x81, 0xcd, 0xa4, 0xca, 0x6b,
-		0x5d, 0xd8, 0x6f, 0xc0, 0x4a, 0x59, 0xa2, 0xde,
-		0x98, 0x6e, 0x47, 0x7d, 0x1d, 0xcd, 0xba, 0xef,
-		0xca, 0xb9, 0x48, 0xea, 0xef, 0x71, 0x1d, 0x8a,
-		0x79, 0x66, 0x84, 0x14, 0x21, 0x80, 0x01, 0x20,
-		0x61, 0x07, 0xab, 0xeb, 0xbb, 0x6b, 0xfa, 0xd8,
-		0x94, 0xfe, 0x5a, 0x63, 0xcd, 0xc6, 0x02, 0x30,
-		0xfb, 0x89, 0xc8, 0xef, 0xd0, 0x9e, 0xcd, 0x7b,
-	},
-	{
-		0x20, 0xd7, 0x1b, 0xf1, 0x4a, 0x92, 0xbc, 0x48,
-		0x99, 0x1b, 0xb2, 0xd9, 0xd5, 0x17, 0xf4, 0xfa,
-		0x52, 0x28, 0xe1, 0x88, 0xaa, 0xa4, 0x1d, 0xe7,
-		0x86, 0xcc, 0x91, 0x18, 0x9d, 0xef, 0x80, 0x5d,
-		0x9b, 0x9f, 0x21, 0x30, 0xd4, 0x12, 0x20, 0xf8,
-		0x77, 0x1d, 0xdf, 0xbc, 0x32, 0x3c, 0xa4, 0xcd,
-		0x7a, 0xb1, 0x49, 0x04, 0xb0, 0x80, 0x13, 0xd2,
-		0xba, 0x31, 0x16, 0xf1, 0x67, 0xe7, 0x8e, 0x37,
-	},
-}
-
-Streebog_Context :: struct {
-	buffer:    [64]byte,
-	h:         [64]byte,
-	n:         [64]byte,
-	sigma:     [64]byte,
-	v_0:       [64]byte,
-	v_512:     [64]byte,
-	buf_size:  u64,
-	hash_size: int,
-	is256:	   bool,
-}
-
-add_mod_512 :: proc(first_vector, second_vector, result_vector: []byte) {
-	t: i32 = 0
-	for i: i32 = 0; i < 64; i += 1 {
-		t = i32(first_vector[i]) + i32(second_vector[i]) + (t >> 8)
-		result_vector[i] = byte(t & 0xff)
-	}
-}
-
-X :: #force_inline proc(a, k, out: []byte) {
-	for i := 0; i < 64; i += 1 {
-		out[i] = a[i] ~ k[i]
-	}
-}
-
-S :: #force_inline proc(state: []byte) {
-	t: [64]byte
-	for i: i32 = 63; i >= 0; i -= 1 {
-		t[i] = PI[state[i]]
-	}
-	copy(state, t[:])
-}
-
-P :: #force_inline proc(state: []byte) {
-	t: [64]byte
-	for i: i32 = 63; i >= 0; i -= 1 {
-		t[i] = state[TAU[i]]
-	}
-	copy(state, t[:])
-}
-
-L :: #force_inline proc(state: []byte) {
-	ins := util.cast_slice([]u64, state)
-	out: [8]u64
-	for i: i32 = 7; i >= 0; i -= 1 {
-		for j: i32 = 63; j >= 0; j -= 1 {
-			if (ins[i] >> u32(j)) & 1 != 0 {
-				out[i] ~= STREEBOG_A[63 - j]
-			}	
-		}
-	}
-	copy(state, util.cast_slice([]byte, out[:]))
-}
-
-E :: #force_inline proc(K, m, state: []byte) {
-	X(m, K, state)
-	for i: i32 = 0; i < 12; i += 1 {
-		S(state)
-		P(state)
-		L(state)
-		get_key(K, i)
-		X(state, K, state)
-	}
-}
-
-get_key :: #force_inline proc(K: []byte, i: i32) {
-	X(K, STREEBOG_C[i][:], K)
-	S(K)
-	P(K)
-	L(K)
-}
-
-G :: #force_inline proc(h, N, m: []byte) {
-	t, K: [64]byte
-	X(N, h, K[:])
-	S(K[:])
-	P(K[:])
-	L(K[:])
-	E(K[:], m, t[:])
-	X(t[:], h, t[:])
-	X(t[:], m, h)
-}
-
-stage2 :: proc(ctx: ^Streebog_Context, m: []byte) {
-	G(ctx.h[:], ctx.n[:], m)
-	add_mod_512(ctx.n[:], ctx.v_512[:], ctx.n[:])
-	add_mod_512(ctx.sigma[:], m, ctx.sigma[:])
-}
-
-padding :: proc(ctx: ^Streebog_Context) {
-	if ctx.buf_size < 64 {
-		t: [64]byte
-		copy(t[:], ctx.buffer[:int(ctx.buf_size)])
-		t[ctx.buf_size] = 0x01
-		copy(ctx.buffer[:], t[:])
-	}
-}
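
One detail worth keeping in mind from the deleted file: add_mod_512 treats its 64-byte vectors as little-endian 512-bit integers and ripples a carry through the byte-wise sum, dropping the final carry, which is exactly reduction mod 2^512. A self-contained restatement with hypothetical demo values:

package addmod_demo

import "core:fmt"

// Same carry-propagating addition as the deleted helper: byte i of both
// inputs is summed with the carry out of byte i - 1.
add_mod_512 :: proc(a, b, out: []byte) {
	t: i32
	for i := 0; i < 64; i += 1 {
		t = i32(a[i]) + i32(b[i]) + (t >> 8)
		out[i] = byte(t & 0xff)
	}
}

main :: proc() {
	a, b, r: [64]byte
	a[0], b[0] = 0xff, 0x01 // byte 0 wraps to 0x00 and carries into byte 1
	add_mod_512(a[:], b[:], r[:])
	fmt.println(r[0], r[1]) // prints 0 1
}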

+ 0 - 280
core/crypto/tiger/tiger.odin

@@ -1,280 +0,0 @@
-package tiger
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the Tiger1 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
-*/
-
-import "core:os"
-import "core:io"
-
-import "../_tiger"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_160 :: 20
-DIGEST_SIZE_192 :: 24
-
-// hash_string_128 will hash the given input and return the
-// computed hash
-hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128(transmute([]byte)(data))
-}
-
-// hash_bytes_128 will hash the given input and return the
-// computed hash
-hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_128 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128 will read the file provided by the given handle
-// and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128 :: proc {
-    hash_stream_128,
-    hash_file_128,
-    hash_bytes_128,
-    hash_string_128,
-    hash_bytes_to_buffer_128,
-    hash_string_to_buffer_128,
-}
-
-// hash_string_160 will hash the given input and return the
-// computed hash
-hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160(transmute([]byte)(data))
-}
-
-// hash_bytes_160 will hash the given input and return the
-// computed hash
-hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_160 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160 will read the file provided by the given handle
-// and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160 :: proc {
-    hash_stream_160,
-    hash_file_160,
-    hash_bytes_160,
-    hash_string_160,
-    hash_bytes_to_buffer_160,
-    hash_string_to_buffer_160,
-}
-
-// hash_string_192 will hash the given input and return the
-// computed hash
-hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192(transmute([]byte)(data))
-}
-
-// hash_bytes_192 will hash the given input and return the
-// computed hash
-hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_192 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_192 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 1
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_192 will read the file provided by the given handle
-// and compute a hash
-hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192 :: proc {
-    hash_stream_192,
-    hash_file_192,
-    hash_bytes_192,
-    hash_string_192,
-    hash_bytes_to_buffer_192,
-    hash_string_to_buffer_192,
-}
-
-/*
-    Low level API
-*/
-
-Tiger_Context :: _tiger.Tiger_Context
-
-init :: proc(ctx: ^_tiger.Tiger_Context) {
-    ctx.ver = 1
-    _tiger.init(ctx)
-}
-
-update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) {
-    _tiger.update(ctx, data)
-}
-
-final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) {
-    _tiger.final(ctx, hash)
-}

+ 0 - 280
core/crypto/tiger2/tiger2.odin

@@ -1,280 +0,0 @@
-package tiger2
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the Tiger2 variant of the Tiger hashing algorithm as defined in <https://www.cs.technion.ac.il/~biham/Reports/Tiger/>
-*/
-
-import "core:os"
-import "core:io"
-
-import "../_tiger"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_160 :: 20
-DIGEST_SIZE_192 :: 24
-
-// hash_string_128 will hash the given input and return the
-// computed hash
-hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128(transmute([]byte)(data))
-}
-
-// hash_bytes_128 will hash the given input and return the
-// computed hash
-hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_128 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_128 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_128 will read the file provided by the given handle
-// and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128 :: proc {
-    hash_stream_128,
-    hash_file_128,
-    hash_bytes_128,
-    hash_string_128,
-    hash_bytes_to_buffer_128,
-    hash_string_to_buffer_128,
-}
-
-// hash_string_160 will hash the given input and return the
-// computed hash
-hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160(transmute([]byte)(data))
-}
-
-// hash_bytes_160 will hash the given input and return the
-// computed hash
-hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_160 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_160 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_160 will read the file provided by the given handle
-// and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160 :: proc {
-    hash_stream_160,
-    hash_file_160,
-    hash_bytes_160,
-    hash_string_160,
-    hash_bytes_to_buffer_160,
-    hash_string_to_buffer_160,
-}
-
-// hash_string_192 will hash the given input and return the
-// computed hash
-hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192(transmute([]byte)(data))
-}
-
-// hash_bytes_192 will hash the given input and return the
-// computed hash
-hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer_192 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    _tiger.update(&ctx, data)
-    _tiger.final(&ctx, hash)
-}
-
-// hash_stream_192 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: _tiger.Tiger_Context
-    ctx.ver = 2
-    _tiger.init(&ctx)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    read := 1
-    for read > 0 {
-        read, _ = io.read(s, buf)
-        if read > 0 {
-            _tiger.update(&ctx, buf[:read])
-        } 
-    }
-    _tiger.final(&ctx, hash[:])
-    return hash, true
-}
-
-// hash_file_192 will read the file provided by the given handle
-// and compute a hash
-hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192 :: proc {
-    hash_stream_192,
-    hash_file_192,
-    hash_bytes_192,
-    hash_string_192,
-    hash_bytes_to_buffer_192,
-    hash_string_to_buffer_192,
-}
-
-/*
-    Low level API
-*/
-
-Tiger_Context :: _tiger.Tiger_Context
-
-init :: proc(ctx: ^_tiger.Tiger_Context) {
-    ctx.ver = 2
-    _tiger.init(ctx)
-}
-
-update :: proc(ctx: ^_tiger.Tiger_Context, data: []byte) {
-    _tiger.update(ctx, data)
-}
-
-final :: proc(ctx: ^_tiger.Tiger_Context, hash: []byte) {
-    _tiger.final(ctx, hash)
-}
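
The tiger and tiger2 packages above are line-for-line identical apart from the ctx.ver they pin before delegating to the shared _tiger package (also removed in this commit). A hypothetical condensation of the pattern; _tiger's definitions sit outside this hunk, so the ver type here is an assumption:

// Not package source: a sketch of what every wrapper in both files does.
hash_bytes_tiger :: proc(data: []byte, ver: int, hash: []byte) {
	ctx: _tiger.Tiger_Context
	ctx.ver = ver // 1 selects Tiger, 2 selects Tiger2; everything else is shared
	_tiger.init(&ctx)
	_tiger.update(&ctx, data)
	_tiger.final(&ctx, hash)
}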

+ 0 - 146
core/crypto/util/util.odin

@@ -1,146 +0,0 @@
-package util
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Various utility procedures
-*/
-
-import "core:mem"
-// Keep vet happy
-_ :: mem
-
-// @note(bp): this can replace the other two
-cast_slice :: #force_inline proc "contextless" ($D: typeid/[]$DE, src: $S/[]$SE) -> D {
-    src := src
-    dst := (^mem.Raw_Slice)(&src)
-
-    when size_of(DE) < size_of(SE) {
-        when size_of(DE) % size_of(SE) == 0 {
-            dst.len /= size_of(SE) / size_of(DE)
-        } else {
-            dst.len *= size_of(SE)
-            dst.len /= size_of(DE)
-        }
-    } else when size_of(DE) > size_of(SE) {
-        when size_of(DE) % size_of(SE) == 0 {
-            dst.len *= size_of(DE) / size_of(SE)
-        } else {
-            dst.len *= size_of(SE)
-            dst.len /= size_of(DE)
-        }
-    } else when size_of(DE) != size_of(SE) {
-        #assert(size_of(DE) % size_of(SE) == 0, "Different size detected")
-        dst.len *= size_of(SE)
-        dst.len /= size_of(DE)
-    }
-
-    return (^D)(dst)^
-}
-
-bytes_to_slice :: #force_inline proc "contextless" ($T: typeid/[]$E, bytes: []byte) -> T {
-    s := transmute(mem.Raw_Slice)bytes
-    s.len /= size_of(E)
-    return transmute(T)s
-}
-
-slice_to_bytes :: #force_inline proc "contextless" (slice: $E/[]$T) -> []byte {
-    s := transmute(mem.Raw_Slice)slice
-    s.len *= size_of(T)
-    return transmute([]byte)s
-}
-
-ROTL16 :: #force_inline proc "contextless" (a, b: u16) -> u16 {
-    return ((a << b) | (a >> (16 - b)))
-}
-
-ROTR16 :: #force_inline proc "contextless" (a, b: u16) -> u16 {
-    return ((a >> b) | (a << (16 - b)))
-}
-
-ROTL32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
-    s := uint(b) & 31
-    return (a << s) | (a >> (32 - s))
-}
-
-ROTR32 :: #force_inline proc "contextless" (a: u32, b: int) -> u32 {
-    s := uint(b) & 31
-    return (a >> s) | (a << (32 - s))
-}
-
-ROTL64 :: #force_inline proc "contextless" (a, b: u64) -> u64 {
-    return ((a << b) | (a >> (64 - b)))
-}
-
-ROTR64 :: #force_inline proc "contextless" (a, b: u64) -> u64 {
-    return ((a >> b) | (a << (64 - b)))
-}
-
-ROTL128 :: #force_inline proc "contextless" (a, b, c, d: ^u32, n: uint) {
-    a, b, c, d := a, b, c, d
-    t := a^ >> (32 - n)
-    a^ = ((a^ << n) | (b^ >> (32 - n)))
-    b^ = ((b^ << n) | (c^ >> (32 - n)))
-    c^ = ((c^ << n) | (d^ >> (32 - n)))
-    d^ = ((d^ << n) | t)
-}
-
-U32_LE :: #force_inline proc "contextless" (b: []byte) -> u32 {
-    return u32(b[0]) | u32(b[1]) << 8 | u32(b[2]) << 16 | u32(b[3]) << 24
-}
-
-U64_LE :: #force_inline proc "contextless" (b: []byte) -> u64 {
-    return u64(b[0])       | u64(b[1]) << 8  | u64(b[2]) << 16 | u64(b[3]) << 24 |
-           u64(b[4]) << 32 | u64(b[5]) << 40 | u64(b[6]) << 48 | u64(b[7]) << 56
-}
-
-U64_BE :: #force_inline proc "contextless" (b: []byte) -> u64 {
-    return u64(b[7])       | u64(b[6]) << 8  | u64(b[5]) << 16 | u64(b[4]) << 24 |
-           u64(b[3]) << 32 | u64(b[2]) << 40 | u64(b[1]) << 48 | u64(b[0]) << 56
-}
-
-PUT_U64_LE :: #force_inline proc "contextless" (b: []byte, v: u64) {
-    b[0] = byte(v)
-    b[1] = byte(v >> 8)
-    b[2] = byte(v >> 16)
-    b[3] = byte(v >> 24)
-    b[4] = byte(v >> 32)
-    b[5] = byte(v >> 40)
-    b[6] = byte(v >> 48)
-    b[7] = byte(v >> 56)
-}
-
-PUT_U32_LE :: #force_inline proc "contextless" (b: []byte, v: u32) {
-    b[0] = byte(v)
-    b[1] = byte(v >> 8)
-    b[2] = byte(v >> 16)
-    b[3] = byte(v >> 24)
-}
-
-PUT_U32_BE :: #force_inline proc "contextless" (b: []byte, v: u32) {
-    b[0] = byte(v >> 24)
-    b[1] = byte(v >> 16)
-    b[2] = byte(v >> 8)
-    b[3] = byte(v)
-}
-
-PUT_U64_BE :: #force_inline proc "contextless" (b: []byte, v: u64) {
-    b[0] = byte(v >> 56)
-    b[1] = byte(v >> 48)
-    b[2] = byte(v >> 40)
-    b[3] = byte(v >> 32)
-    b[4] = byte(v >> 24)
-    b[5] = byte(v >> 16)
-    b[6] = byte(v >> 8)
-    b[7] = byte(v)
-}
-
-XOR_BUF :: #force_inline proc "contextless" (input, output: []byte) {
-    for i := 0; i < len(input); i += 1 {
-        output[i] ~= input[i]
-    }
-}
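
Every helper in this file has a direct replacement in the core library, which is what the rewritten sm3 block procedure earlier in this commit now calls. A quick equivalence sketch, using only procedures that actually appear in the diff above:

package util_replacements

import "core:encoding/endian"
import "core:fmt"
import "core:math/bits"

main :: proc() {
	x: u32 = 0xdeadbeef

	// util.ROTL32(x, 7) becomes bits.rotate_left32(x, 7).
	fmt.println(bits.rotate_left32(x, 7))

	// util.PUT_U32_BE becomes endian.unchecked_put_u32be; the sm3 diff also
	// uses unchecked_get_u32be and unchecked_put_u64be for the other helpers.
	buf: [4]byte
	endian.unchecked_put_u32be(buf[:], x)
	fmt.println(endian.unchecked_get_u32be(buf[:]) == x) // true: round-trip
}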

+ 0 - 806
core/crypto/whirlpool/whirlpool.odin

@@ -1,806 +0,0 @@
-package whirlpool
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the Whirlpool hashing algorithm, as defined in <https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html>
-*/
-
-import "core:os"
-import "core:io"
-
-import "../util"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 64
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Whirlpool_Context
-    // init(&ctx) No-op
-    update(&ctx, data)
-    final(&ctx, hash[:])
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: Whirlpool_Context
-    // init(&ctx) No-op
-    update(&ctx, data)
-    final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Whirlpool_Context
-	// init(&ctx) No-op
-	buf := make([]byte, 512)
-	defer delete(buf)
-	read := 1
-	for read > 0 {
-	    read, _ = io.read(s, buf)
-	    if read > 0 {
-			update(&ctx, buf[:read])
-	    } 
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-@(warning="Init is a no-op for Whirlpool")
-init :: proc(ctx: ^Whirlpool_Context) {
-	// No action needed here
-}
-
-update :: proc(ctx: ^Whirlpool_Context, source: []byte) {
-    source_pos: int
-    nn := len(source)
-    source_bits := u64(nn * 8)
-    source_gap := u32((8 - (int(source_bits & 7))) & 7)
-    buffer_rem := uint(ctx.buffer_bits & 7)
-    b: u32
-
-	for i, carry, value := 31, u32(0), u32(source_bits); i >= 0 && (carry != 0 || value != 0); i -= 1 {
-		carry += u32(ctx.bitlength[i]) + (u32(value & 0xff))
-		ctx.bitlength[i] = byte(carry)
-		carry >>= 8
-		value >>= 8
-	}
-
-	for source_bits > 8 {
-		b = u32(u32((source[source_pos] << source_gap) & 0xff) | u32((source[source_pos+1] & 0xff) >> (8 - source_gap)))
-
-		ctx.buffer[ctx.buffer_pos] |= u8(b >> buffer_rem)
-		ctx.buffer_pos += 1
-		ctx.buffer_bits += int(8 - buffer_rem)
-
-		if ctx.buffer_bits == 512 {
-			transform(ctx)
-			ctx.buffer_bits = 0
-			ctx.buffer_pos = 0
-		}
-		ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem))
-		ctx.buffer_bits += int(buffer_rem)
-		source_bits -= 8
-		source_pos += 1
-	}
-
-	if source_bits > 0 {
-		b = u32((source[source_pos] << source_gap) & 0xff)
-		ctx.buffer[ctx.buffer_pos] |= byte(b) >> buffer_rem
-	} else { b = 0 }
-
-	if u64(buffer_rem) + source_bits < 8 {
-		ctx.buffer_bits += int(source_bits)
-	} else {
-		ctx.buffer_pos += 1
-		ctx.buffer_bits += 8 - int(buffer_rem)
-		source_bits -= u64(8 - buffer_rem)
-
-		if ctx.buffer_bits == 512 {
-			transform(ctx)
-			ctx.buffer_bits = 0
-			ctx.buffer_pos = 0
-		}
-		ctx.buffer[ctx.buffer_pos] = byte(b << (8 - buffer_rem))
-		ctx.buffer_bits += int(source_bits)
-	}
-}
-
-final :: proc(ctx: ^Whirlpool_Context, hash: []byte) {
-	n := ctx
-	n.buffer[n.buffer_pos] |= 0x80 >> (uint(n.buffer_bits) & 7)
-	n.buffer_pos += 1
-
-	if n.buffer_pos > 64 - 32 {
-		if n.buffer_pos < 64 {
-			for i := 0; i < 64 - n.buffer_pos; i += 1 {
-				n.buffer[n.buffer_pos + i] = 0
-			}
-		}
-		transform(ctx)
-		n.buffer_pos = 0
-	}
-
-	if n.buffer_pos < 64 - 32 {
-		for i := 0; i < (64 - 32) - n.buffer_pos; i += 1 {
-			n.buffer[n.buffer_pos + i] = 0
-		}
-	}
-	n.buffer_pos = 64 - 32
-
-	for i := 0; i < 32; i += 1 {
-		n.buffer[n.buffer_pos + i] = n.bitlength[i]
-	}
-	transform(ctx)
-
-	for i := 0; i < 8; i += 1 {
-		hash[i * 8]     = byte(n.hash[i] >> 56)
-		hash[i * 8 + 1] = byte(n.hash[i] >> 48)
-		hash[i * 8 + 2] = byte(n.hash[i] >> 40)
-		hash[i * 8 + 3] = byte(n.hash[i] >> 32)
-		hash[i * 8 + 4] = byte(n.hash[i] >> 24)
-		hash[i * 8 + 5] = byte(n.hash[i] >> 16)
-		hash[i * 8 + 6] = byte(n.hash[i] >> 8)
-		hash[i * 8 + 7] = byte(n.hash[i])
-	}
-}
-
-/*
-	Whirlpool implementation
-*/
-
-ROUNDS :: 10
-
-Whirlpool_Context :: struct {
-    bitlength:   [32]byte,
-    buffer:      [64]byte,
-    buffer_bits: int,
-    buffer_pos:  int,
-    hash:        [8]u64,
-}
-
-C0 := [256]u64 {
-	0x18186018c07830d8, 0x23238c2305af4626, 0xc6c63fc67ef991b8, 0xe8e887e8136fcdfb,
-	0x878726874ca113cb, 0xb8b8dab8a9626d11, 0x0101040108050209, 0x4f4f214f426e9e0d,
-	0x3636d836adee6c9b, 0xa6a6a2a6590451ff, 0xd2d26fd2debdb90c, 0xf5f5f3f5fb06f70e,
-	0x7979f979ef80f296, 0x6f6fa16f5fcede30, 0x91917e91fcef3f6d, 0x52525552aa07a4f8,
-	0x60609d6027fdc047, 0xbcbccabc89766535, 0x9b9b569baccd2b37, 0x8e8e028e048c018a,
-	0xa3a3b6a371155bd2, 0x0c0c300c603c186c, 0x7b7bf17bff8af684, 0x3535d435b5e16a80,
-	0x1d1d741de8693af5, 0xe0e0a7e05347ddb3, 0xd7d77bd7f6acb321, 0xc2c22fc25eed999c,
-	0x2e2eb82e6d965c43, 0x4b4b314b627a9629, 0xfefedffea321e15d, 0x575741578216aed5,
-	0x15155415a8412abd, 0x7777c1779fb6eee8, 0x3737dc37a5eb6e92, 0xe5e5b3e57b56d79e,
-	0x9f9f469f8cd92313, 0xf0f0e7f0d317fd23, 0x4a4a354a6a7f9420, 0xdada4fda9e95a944,
-	0x58587d58fa25b0a2, 0xc9c903c906ca8fcf, 0x2929a429558d527c, 0x0a0a280a5022145a,
-	0xb1b1feb1e14f7f50, 0xa0a0baa0691a5dc9, 0x6b6bb16b7fdad614, 0x85852e855cab17d9,
-	0xbdbdcebd8173673c, 0x5d5d695dd234ba8f, 0x1010401080502090, 0xf4f4f7f4f303f507,
-	0xcbcb0bcb16c08bdd, 0x3e3ef83eedc67cd3, 0x0505140528110a2d, 0x676781671fe6ce78,
-	0xe4e4b7e47353d597, 0x27279c2725bb4e02, 0x4141194132588273, 0x8b8b168b2c9d0ba7,
-	0xa7a7a6a7510153f6, 0x7d7de97dcf94fab2, 0x95956e95dcfb3749, 0xd8d847d88e9fad56,
-	0xfbfbcbfb8b30eb70, 0xeeee9fee2371c1cd, 0x7c7ced7cc791f8bb, 0x6666856617e3cc71,
-	0xdddd53dda68ea77b, 0x17175c17b84b2eaf, 0x4747014702468e45, 0x9e9e429e84dc211a,
-	0xcaca0fca1ec589d4, 0x2d2db42d75995a58, 0xbfbfc6bf9179632e, 0x07071c07381b0e3f,
-	0xadad8ead012347ac, 0x5a5a755aea2fb4b0, 0x838336836cb51bef, 0x3333cc3385ff66b6,
-	0x636391633ff2c65c, 0x02020802100a0412, 0xaaaa92aa39384993, 0x7171d971afa8e2de,
-	0xc8c807c80ecf8dc6, 0x19196419c87d32d1, 0x494939497270923b, 0xd9d943d9869aaf5f,
-	0xf2f2eff2c31df931, 0xe3e3abe34b48dba8, 0x5b5b715be22ab6b9, 0x88881a8834920dbc,
-	0x9a9a529aa4c8293e, 0x262698262dbe4c0b, 0x3232c8328dfa64bf, 0xb0b0fab0e94a7d59,
-	0xe9e983e91b6acff2, 0x0f0f3c0f78331e77, 0xd5d573d5e6a6b733, 0x80803a8074ba1df4,
-	0xbebec2be997c6127, 0xcdcd13cd26de87eb, 0x3434d034bde46889, 0x48483d487a759032,
-	0xffffdbffab24e354, 0x7a7af57af78ff48d, 0x90907a90f4ea3d64, 0x5f5f615fc23ebe9d,
-	0x202080201da0403d, 0x6868bd6867d5d00f, 0x1a1a681ad07234ca, 0xaeae82ae192c41b7,
-	0xb4b4eab4c95e757d, 0x54544d549a19a8ce, 0x93937693ece53b7f, 0x222288220daa442f,
-	0x64648d6407e9c863, 0xf1f1e3f1db12ff2a, 0x7373d173bfa2e6cc, 0x12124812905a2482,
-	0x40401d403a5d807a, 0x0808200840281048, 0xc3c32bc356e89b95, 0xecec97ec337bc5df,
-	0xdbdb4bdb9690ab4d, 0xa1a1bea1611f5fc0, 0x8d8d0e8d1c830791, 0x3d3df43df5c97ac8,
-	0x97976697ccf1335b, 0x0000000000000000, 0xcfcf1bcf36d483f9, 0x2b2bac2b4587566e,
-	0x7676c57697b3ece1, 0x8282328264b019e6, 0xd6d67fd6fea9b128, 0x1b1b6c1bd87736c3,
-	0xb5b5eeb5c15b7774, 0xafaf86af112943be, 0x6a6ab56a77dfd41d, 0x50505d50ba0da0ea,
-	0x45450945124c8a57, 0xf3f3ebf3cb18fb38, 0x3030c0309df060ad, 0xefef9bef2b74c3c4,
-	0x3f3ffc3fe5c37eda, 0x55554955921caac7, 0xa2a2b2a2791059db, 0xeaea8fea0365c9e9,
-	0x656589650fecca6a, 0xbabad2bab9686903, 0x2f2fbc2f65935e4a, 0xc0c027c04ee79d8e,
-	0xdede5fdebe81a160, 0x1c1c701ce06c38fc, 0xfdfdd3fdbb2ee746, 0x4d4d294d52649a1f,
-	0x92927292e4e03976, 0x7575c9758fbceafa, 0x06061806301e0c36, 0x8a8a128a249809ae,
-	0xb2b2f2b2f940794b, 0xe6e6bfe66359d185, 0x0e0e380e70361c7e, 0x1f1f7c1ff8633ee7,
-	0x6262956237f7c455, 0xd4d477d4eea3b53a, 0xa8a89aa829324d81, 0x96966296c4f43152,
-	0xf9f9c3f99b3aef62, 0xc5c533c566f697a3, 0x2525942535b14a10, 0x59597959f220b2ab,
-	0x84842a8454ae15d0, 0x7272d572b7a7e4c5, 0x3939e439d5dd72ec, 0x4c4c2d4c5a619816,
-	0x5e5e655eca3bbc94, 0x7878fd78e785f09f, 0x3838e038ddd870e5, 0x8c8c0a8c14860598,
-	0xd1d163d1c6b2bf17, 0xa5a5aea5410b57e4, 0xe2e2afe2434dd9a1, 0x616199612ff8c24e,
-	0xb3b3f6b3f1457b42, 0x2121842115a54234, 0x9c9c4a9c94d62508, 0x1e1e781ef0663cee,
-	0x4343114322528661, 0xc7c73bc776fc93b1, 0xfcfcd7fcb32be54f, 0x0404100420140824,
-	0x51515951b208a2e3, 0x99995e99bcc72f25, 0x6d6da96d4fc4da22, 0x0d0d340d68391a65,
-	0xfafacffa8335e979, 0xdfdf5bdfb684a369, 0x7e7ee57ed79bfca9, 0x242490243db44819,
-	0x3b3bec3bc5d776fe, 0xabab96ab313d4b9a, 0xcece1fce3ed181f0, 0x1111441188552299,
-	0x8f8f068f0c890383, 0x4e4e254e4a6b9c04, 0xb7b7e6b7d1517366, 0xebeb8beb0b60cbe0,
-	0x3c3cf03cfdcc78c1, 0x81813e817cbf1ffd, 0x94946a94d4fe3540, 0xf7f7fbf7eb0cf31c,
-	0xb9b9deb9a1676f18, 0x13134c13985f268b, 0x2c2cb02c7d9c5851, 0xd3d36bd3d6b8bb05,
-	0xe7e7bbe76b5cd38c, 0x6e6ea56e57cbdc39, 0xc4c437c46ef395aa, 0x03030c03180f061b,
-	0x565645568a13acdc, 0x44440d441a49885e, 0x7f7fe17fdf9efea0, 0xa9a99ea921374f88,
-	0x2a2aa82a4d825467, 0xbbbbd6bbb16d6b0a, 0xc1c123c146e29f87, 0x53535153a202a6f1,
-	0xdcdc57dcae8ba572, 0x0b0b2c0b58271653, 0x9d9d4e9d9cd32701, 0x6c6cad6c47c1d82b,
-	0x3131c43195f562a4, 0x7474cd7487b9e8f3, 0xf6f6fff6e309f115, 0x464605460a438c4c,
-	0xacac8aac092645a5, 0x89891e893c970fb5, 0x14145014a04428b4, 0xe1e1a3e15b42dfba,
-	0x16165816b04e2ca6, 0x3a3ae83acdd274f7, 0x6969b9696fd0d206, 0x09092409482d1241,
-	0x7070dd70a7ade0d7, 0xb6b6e2b6d954716f, 0xd0d067d0ceb7bd1e, 0xeded93ed3b7ec7d6,
-	0xcccc17cc2edb85e2, 0x424215422a578468, 0x98985a98b4c22d2c, 0xa4a4aaa4490e55ed,
-	0x2828a0285d885075, 0x5c5c6d5cda31b886, 0xf8f8c7f8933fed6b, 0x8686228644a411c2,
-}
-
-C1 := [256]u64 {
-	0xd818186018c07830, 0x2623238c2305af46, 0xb8c6c63fc67ef991, 0xfbe8e887e8136fcd,
-	0xcb878726874ca113, 0x11b8b8dab8a9626d, 0x0901010401080502, 0x0d4f4f214f426e9e,
-	0x9b3636d836adee6c, 0xffa6a6a2a6590451, 0x0cd2d26fd2debdb9, 0x0ef5f5f3f5fb06f7,
-	0x967979f979ef80f2, 0x306f6fa16f5fcede, 0x6d91917e91fcef3f, 0xf852525552aa07a4,
-	0x4760609d6027fdc0, 0x35bcbccabc897665, 0x379b9b569baccd2b, 0x8a8e8e028e048c01,
-	0xd2a3a3b6a371155b, 0x6c0c0c300c603c18, 0x847b7bf17bff8af6, 0x803535d435b5e16a,
-	0xf51d1d741de8693a, 0xb3e0e0a7e05347dd, 0x21d7d77bd7f6acb3, 0x9cc2c22fc25eed99,
-	0x432e2eb82e6d965c, 0x294b4b314b627a96, 0x5dfefedffea321e1, 0xd5575741578216ae,
-	0xbd15155415a8412a, 0xe87777c1779fb6ee, 0x923737dc37a5eb6e, 0x9ee5e5b3e57b56d7,
-	0x139f9f469f8cd923, 0x23f0f0e7f0d317fd, 0x204a4a354a6a7f94, 0x44dada4fda9e95a9,
-	0xa258587d58fa25b0, 0xcfc9c903c906ca8f, 0x7c2929a429558d52, 0x5a0a0a280a502214,
-	0x50b1b1feb1e14f7f, 0xc9a0a0baa0691a5d, 0x146b6bb16b7fdad6, 0xd985852e855cab17,
-	0x3cbdbdcebd817367, 0x8f5d5d695dd234ba, 0x9010104010805020, 0x07f4f4f7f4f303f5,
-	0xddcbcb0bcb16c08b, 0xd33e3ef83eedc67c, 0x2d0505140528110a, 0x78676781671fe6ce,
-	0x97e4e4b7e47353d5, 0x0227279c2725bb4e, 0x7341411941325882, 0xa78b8b168b2c9d0b,
-	0xf6a7a7a6a7510153, 0xb27d7de97dcf94fa, 0x4995956e95dcfb37, 0x56d8d847d88e9fad,
-	0x70fbfbcbfb8b30eb, 0xcdeeee9fee2371c1, 0xbb7c7ced7cc791f8, 0x716666856617e3cc,
-	0x7bdddd53dda68ea7, 0xaf17175c17b84b2e, 0x454747014702468e, 0x1a9e9e429e84dc21,
-	0xd4caca0fca1ec589, 0x582d2db42d75995a, 0x2ebfbfc6bf917963, 0x3f07071c07381b0e,
-	0xacadad8ead012347, 0xb05a5a755aea2fb4, 0xef838336836cb51b, 0xb63333cc3385ff66,
-	0x5c636391633ff2c6, 0x1202020802100a04, 0x93aaaa92aa393849, 0xde7171d971afa8e2,
-	0xc6c8c807c80ecf8d, 0xd119196419c87d32, 0x3b49493949727092, 0x5fd9d943d9869aaf,
-	0x31f2f2eff2c31df9, 0xa8e3e3abe34b48db, 0xb95b5b715be22ab6, 0xbc88881a8834920d,
-	0x3e9a9a529aa4c829, 0x0b262698262dbe4c, 0xbf3232c8328dfa64, 0x59b0b0fab0e94a7d,
-	0xf2e9e983e91b6acf, 0x770f0f3c0f78331e, 0x33d5d573d5e6a6b7, 0xf480803a8074ba1d,
-	0x27bebec2be997c61, 0xebcdcd13cd26de87, 0x893434d034bde468, 0x3248483d487a7590,
-	0x54ffffdbffab24e3, 0x8d7a7af57af78ff4, 0x6490907a90f4ea3d, 0x9d5f5f615fc23ebe,
-	0x3d202080201da040, 0x0f6868bd6867d5d0, 0xca1a1a681ad07234, 0xb7aeae82ae192c41,
-	0x7db4b4eab4c95e75, 0xce54544d549a19a8, 0x7f93937693ece53b, 0x2f222288220daa44,
-	0x6364648d6407e9c8, 0x2af1f1e3f1db12ff, 0xcc7373d173bfa2e6, 0x8212124812905a24,
-	0x7a40401d403a5d80, 0x4808082008402810, 0x95c3c32bc356e89b, 0xdfecec97ec337bc5,
-	0x4ddbdb4bdb9690ab, 0xc0a1a1bea1611f5f, 0x918d8d0e8d1c8307, 0xc83d3df43df5c97a,
-	0x5b97976697ccf133, 0x0000000000000000, 0xf9cfcf1bcf36d483, 0x6e2b2bac2b458756,
-	0xe17676c57697b3ec, 0xe68282328264b019, 0x28d6d67fd6fea9b1, 0xc31b1b6c1bd87736,
-	0x74b5b5eeb5c15b77, 0xbeafaf86af112943, 0x1d6a6ab56a77dfd4, 0xea50505d50ba0da0,
-	0x5745450945124c8a, 0x38f3f3ebf3cb18fb, 0xad3030c0309df060, 0xc4efef9bef2b74c3,
-	0xda3f3ffc3fe5c37e, 0xc755554955921caa, 0xdba2a2b2a2791059, 0xe9eaea8fea0365c9,
-	0x6a656589650fecca, 0x03babad2bab96869, 0x4a2f2fbc2f65935e, 0x8ec0c027c04ee79d,
-	0x60dede5fdebe81a1, 0xfc1c1c701ce06c38, 0x46fdfdd3fdbb2ee7, 0x1f4d4d294d52649a,
-	0x7692927292e4e039, 0xfa7575c9758fbcea, 0x3606061806301e0c, 0xae8a8a128a249809,
-	0x4bb2b2f2b2f94079, 0x85e6e6bfe66359d1, 0x7e0e0e380e70361c, 0xe71f1f7c1ff8633e,
-	0x556262956237f7c4, 0x3ad4d477d4eea3b5, 0x81a8a89aa829324d, 0x5296966296c4f431,
-	0x62f9f9c3f99b3aef, 0xa3c5c533c566f697, 0x102525942535b14a, 0xab59597959f220b2,
-	0xd084842a8454ae15, 0xc57272d572b7a7e4, 0xec3939e439d5dd72, 0x164c4c2d4c5a6198,
-	0x945e5e655eca3bbc, 0x9f7878fd78e785f0, 0xe53838e038ddd870, 0x988c8c0a8c148605,
-	0x17d1d163d1c6b2bf, 0xe4a5a5aea5410b57, 0xa1e2e2afe2434dd9, 0x4e616199612ff8c2,
-	0x42b3b3f6b3f1457b, 0x342121842115a542, 0x089c9c4a9c94d625, 0xee1e1e781ef0663c,
-	0x6143431143225286, 0xb1c7c73bc776fc93, 0x4ffcfcd7fcb32be5, 0x2404041004201408,
-	0xe351515951b208a2, 0x2599995e99bcc72f, 0x226d6da96d4fc4da, 0x650d0d340d68391a,
-	0x79fafacffa8335e9, 0x69dfdf5bdfb684a3, 0xa97e7ee57ed79bfc, 0x19242490243db448,
-	0xfe3b3bec3bc5d776, 0x9aabab96ab313d4b, 0xf0cece1fce3ed181, 0x9911114411885522,
-	0x838f8f068f0c8903, 0x044e4e254e4a6b9c, 0x66b7b7e6b7d15173, 0xe0ebeb8beb0b60cb,
-	0xc13c3cf03cfdcc78, 0xfd81813e817cbf1f, 0x4094946a94d4fe35, 0x1cf7f7fbf7eb0cf3,
-	0x18b9b9deb9a1676f, 0x8b13134c13985f26, 0x512c2cb02c7d9c58, 0x05d3d36bd3d6b8bb,
-	0x8ce7e7bbe76b5cd3, 0x396e6ea56e57cbdc, 0xaac4c437c46ef395, 0x1b03030c03180f06,
-	0xdc565645568a13ac, 0x5e44440d441a4988, 0xa07f7fe17fdf9efe, 0x88a9a99ea921374f,
-	0x672a2aa82a4d8254, 0x0abbbbd6bbb16d6b, 0x87c1c123c146e29f, 0xf153535153a202a6,
-	0x72dcdc57dcae8ba5, 0x530b0b2c0b582716, 0x019d9d4e9d9cd327, 0x2b6c6cad6c47c1d8,
-	0xa43131c43195f562, 0xf37474cd7487b9e8, 0x15f6f6fff6e309f1, 0x4c464605460a438c,
-	0xa5acac8aac092645, 0xb589891e893c970f, 0xb414145014a04428, 0xbae1e1a3e15b42df,
-	0xa616165816b04e2c, 0xf73a3ae83acdd274, 0x066969b9696fd0d2, 0x4109092409482d12,
-	0xd77070dd70a7ade0, 0x6fb6b6e2b6d95471, 0x1ed0d067d0ceb7bd, 0xd6eded93ed3b7ec7,
-	0xe2cccc17cc2edb85, 0x68424215422a5784, 0x2c98985a98b4c22d, 0xeda4a4aaa4490e55,
-	0x752828a0285d8850, 0x865c5c6d5cda31b8, 0x6bf8f8c7f8933fed, 0xc28686228644a411,
-}
-
-C2 := [256]u64 {
-	0x30d818186018c078, 0x462623238c2305af, 0x91b8c6c63fc67ef9, 0xcdfbe8e887e8136f,
-	0x13cb878726874ca1, 0x6d11b8b8dab8a962, 0x0209010104010805, 0x9e0d4f4f214f426e,
-	0x6c9b3636d836adee, 0x51ffa6a6a2a65904, 0xb90cd2d26fd2debd, 0xf70ef5f5f3f5fb06,
-	0xf2967979f979ef80, 0xde306f6fa16f5fce, 0x3f6d91917e91fcef, 0xa4f852525552aa07,
-	0xc04760609d6027fd, 0x6535bcbccabc8976, 0x2b379b9b569baccd, 0x018a8e8e028e048c,
-	0x5bd2a3a3b6a37115, 0x186c0c0c300c603c, 0xf6847b7bf17bff8a, 0x6a803535d435b5e1,
-	0x3af51d1d741de869, 0xddb3e0e0a7e05347, 0xb321d7d77bd7f6ac, 0x999cc2c22fc25eed,
-	0x5c432e2eb82e6d96, 0x96294b4b314b627a, 0xe15dfefedffea321, 0xaed5575741578216,
-	0x2abd15155415a841, 0xeee87777c1779fb6, 0x6e923737dc37a5eb, 0xd79ee5e5b3e57b56,
-	0x23139f9f469f8cd9, 0xfd23f0f0e7f0d317, 0x94204a4a354a6a7f, 0xa944dada4fda9e95,
-	0xb0a258587d58fa25, 0x8fcfc9c903c906ca, 0x527c2929a429558d, 0x145a0a0a280a5022,
-	0x7f50b1b1feb1e14f, 0x5dc9a0a0baa0691a, 0xd6146b6bb16b7fda, 0x17d985852e855cab,
-	0x673cbdbdcebd8173, 0xba8f5d5d695dd234, 0x2090101040108050, 0xf507f4f4f7f4f303,
-	0x8bddcbcb0bcb16c0, 0x7cd33e3ef83eedc6, 0x0a2d050514052811, 0xce78676781671fe6,
-	0xd597e4e4b7e47353, 0x4e0227279c2725bb, 0x8273414119413258, 0x0ba78b8b168b2c9d,
-	0x53f6a7a7a6a75101, 0xfab27d7de97dcf94, 0x374995956e95dcfb, 0xad56d8d847d88e9f,
-	0xeb70fbfbcbfb8b30, 0xc1cdeeee9fee2371, 0xf8bb7c7ced7cc791, 0xcc716666856617e3,
-	0xa77bdddd53dda68e, 0x2eaf17175c17b84b, 0x8e45474701470246, 0x211a9e9e429e84dc,
-	0x89d4caca0fca1ec5, 0x5a582d2db42d7599, 0x632ebfbfc6bf9179, 0x0e3f07071c07381b,
-	0x47acadad8ead0123, 0xb4b05a5a755aea2f, 0x1bef838336836cb5, 0x66b63333cc3385ff,
-	0xc65c636391633ff2, 0x041202020802100a, 0x4993aaaa92aa3938, 0xe2de7171d971afa8,
-	0x8dc6c8c807c80ecf, 0x32d119196419c87d, 0x923b494939497270, 0xaf5fd9d943d9869a,
-	0xf931f2f2eff2c31d, 0xdba8e3e3abe34b48, 0xb6b95b5b715be22a, 0x0dbc88881a883492,
-	0x293e9a9a529aa4c8, 0x4c0b262698262dbe, 0x64bf3232c8328dfa, 0x7d59b0b0fab0e94a,
-	0xcff2e9e983e91b6a, 0x1e770f0f3c0f7833, 0xb733d5d573d5e6a6, 0x1df480803a8074ba,
-	0x6127bebec2be997c, 0x87ebcdcd13cd26de, 0x68893434d034bde4, 0x903248483d487a75,
-	0xe354ffffdbffab24, 0xf48d7a7af57af78f, 0x3d6490907a90f4ea, 0xbe9d5f5f615fc23e,
-	0x403d202080201da0, 0xd00f6868bd6867d5, 0x34ca1a1a681ad072, 0x41b7aeae82ae192c,
-	0x757db4b4eab4c95e, 0xa8ce54544d549a19, 0x3b7f93937693ece5, 0x442f222288220daa,
-	0xc86364648d6407e9, 0xff2af1f1e3f1db12, 0xe6cc7373d173bfa2, 0x248212124812905a,
-	0x807a40401d403a5d, 0x1048080820084028, 0x9b95c3c32bc356e8, 0xc5dfecec97ec337b,
-	0xab4ddbdb4bdb9690, 0x5fc0a1a1bea1611f, 0x07918d8d0e8d1c83, 0x7ac83d3df43df5c9,
-	0x335b97976697ccf1, 0x0000000000000000, 0x83f9cfcf1bcf36d4, 0x566e2b2bac2b4587,
-	0xece17676c57697b3, 0x19e68282328264b0, 0xb128d6d67fd6fea9, 0x36c31b1b6c1bd877,
-	0x7774b5b5eeb5c15b, 0x43beafaf86af1129, 0xd41d6a6ab56a77df, 0xa0ea50505d50ba0d,
-	0x8a5745450945124c, 0xfb38f3f3ebf3cb18, 0x60ad3030c0309df0, 0xc3c4efef9bef2b74,
-	0x7eda3f3ffc3fe5c3, 0xaac755554955921c, 0x59dba2a2b2a27910, 0xc9e9eaea8fea0365,
-	0xca6a656589650fec, 0x6903babad2bab968, 0x5e4a2f2fbc2f6593, 0x9d8ec0c027c04ee7,
-	0xa160dede5fdebe81, 0x38fc1c1c701ce06c, 0xe746fdfdd3fdbb2e, 0x9a1f4d4d294d5264,
-	0x397692927292e4e0, 0xeafa7575c9758fbc, 0x0c3606061806301e, 0x09ae8a8a128a2498,
-	0x794bb2b2f2b2f940, 0xd185e6e6bfe66359, 0x1c7e0e0e380e7036, 0x3ee71f1f7c1ff863,
-	0xc4556262956237f7, 0xb53ad4d477d4eea3, 0x4d81a8a89aa82932, 0x315296966296c4f4,
-	0xef62f9f9c3f99b3a, 0x97a3c5c533c566f6, 0x4a102525942535b1, 0xb2ab59597959f220,
-	0x15d084842a8454ae, 0xe4c57272d572b7a7, 0x72ec3939e439d5dd, 0x98164c4c2d4c5a61,
-	0xbc945e5e655eca3b, 0xf09f7878fd78e785, 0x70e53838e038ddd8, 0x05988c8c0a8c1486,
-	0xbf17d1d163d1c6b2, 0x57e4a5a5aea5410b, 0xd9a1e2e2afe2434d, 0xc24e616199612ff8,
-	0x7b42b3b3f6b3f145, 0x42342121842115a5, 0x25089c9c4a9c94d6, 0x3cee1e1e781ef066,
-	0x8661434311432252, 0x93b1c7c73bc776fc, 0xe54ffcfcd7fcb32b, 0x0824040410042014,
-	0xa2e351515951b208, 0x2f2599995e99bcc7, 0xda226d6da96d4fc4, 0x1a650d0d340d6839,
-	0xe979fafacffa8335, 0xa369dfdf5bdfb684, 0xfca97e7ee57ed79b, 0x4819242490243db4,
-	0x76fe3b3bec3bc5d7, 0x4b9aabab96ab313d, 0x81f0cece1fce3ed1, 0x2299111144118855,
-	0x03838f8f068f0c89, 0x9c044e4e254e4a6b, 0x7366b7b7e6b7d151, 0xcbe0ebeb8beb0b60,
-	0x78c13c3cf03cfdcc, 0x1ffd81813e817cbf, 0x354094946a94d4fe, 0xf31cf7f7fbf7eb0c,
-	0x6f18b9b9deb9a167, 0x268b13134c13985f, 0x58512c2cb02c7d9c, 0xbb05d3d36bd3d6b8,
-	0xd38ce7e7bbe76b5c, 0xdc396e6ea56e57cb, 0x95aac4c437c46ef3, 0x061b03030c03180f,
-	0xacdc565645568a13, 0x885e44440d441a49, 0xfea07f7fe17fdf9e, 0x4f88a9a99ea92137,
-	0x54672a2aa82a4d82, 0x6b0abbbbd6bbb16d, 0x9f87c1c123c146e2, 0xa6f153535153a202,
-	0xa572dcdc57dcae8b, 0x16530b0b2c0b5827, 0x27019d9d4e9d9cd3, 0xd82b6c6cad6c47c1,
-	0x62a43131c43195f5, 0xe8f37474cd7487b9, 0xf115f6f6fff6e309, 0x8c4c464605460a43,
-	0x45a5acac8aac0926, 0x0fb589891e893c97, 0x28b414145014a044, 0xdfbae1e1a3e15b42,
-	0x2ca616165816b04e, 0x74f73a3ae83acdd2, 0xd2066969b9696fd0, 0x124109092409482d,
-	0xe0d77070dd70a7ad, 0x716fb6b6e2b6d954, 0xbd1ed0d067d0ceb7, 0xc7d6eded93ed3b7e,
-	0x85e2cccc17cc2edb, 0x8468424215422a57, 0x2d2c98985a98b4c2, 0x55eda4a4aaa4490e,
-	0x50752828a0285d88, 0xb8865c5c6d5cda31, 0xed6bf8f8c7f8933f, 0x11c28686228644a4,
-}
-
-C3 := [256]u64 {
-	0x7830d818186018c0, 0xaf462623238c2305, 0xf991b8c6c63fc67e, 0x6fcdfbe8e887e813,
-	0xa113cb878726874c, 0x626d11b8b8dab8a9, 0x0502090101040108, 0x6e9e0d4f4f214f42,
-	0xee6c9b3636d836ad, 0x0451ffa6a6a2a659, 0xbdb90cd2d26fd2de, 0x06f70ef5f5f3f5fb,
-	0x80f2967979f979ef, 0xcede306f6fa16f5f, 0xef3f6d91917e91fc, 0x07a4f852525552aa,
-	0xfdc04760609d6027, 0x766535bcbccabc89, 0xcd2b379b9b569bac, 0x8c018a8e8e028e04,
-	0x155bd2a3a3b6a371, 0x3c186c0c0c300c60, 0x8af6847b7bf17bff, 0xe16a803535d435b5,
-	0x693af51d1d741de8, 0x47ddb3e0e0a7e053, 0xacb321d7d77bd7f6, 0xed999cc2c22fc25e,
-	0x965c432e2eb82e6d, 0x7a96294b4b314b62, 0x21e15dfefedffea3, 0x16aed55757415782,
-	0x412abd15155415a8, 0xb6eee87777c1779f, 0xeb6e923737dc37a5, 0x56d79ee5e5b3e57b,
-	0xd923139f9f469f8c, 0x17fd23f0f0e7f0d3, 0x7f94204a4a354a6a, 0x95a944dada4fda9e,
-	0x25b0a258587d58fa, 0xca8fcfc9c903c906, 0x8d527c2929a42955, 0x22145a0a0a280a50,
-	0x4f7f50b1b1feb1e1, 0x1a5dc9a0a0baa069, 0xdad6146b6bb16b7f, 0xab17d985852e855c,
-	0x73673cbdbdcebd81, 0x34ba8f5d5d695dd2, 0x5020901010401080, 0x03f507f4f4f7f4f3,
-	0xc08bddcbcb0bcb16, 0xc67cd33e3ef83eed, 0x110a2d0505140528, 0xe6ce78676781671f,
-	0x53d597e4e4b7e473, 0xbb4e0227279c2725, 0x5882734141194132, 0x9d0ba78b8b168b2c,
-	0x0153f6a7a7a6a751, 0x94fab27d7de97dcf, 0xfb374995956e95dc, 0x9fad56d8d847d88e,
-	0x30eb70fbfbcbfb8b, 0x71c1cdeeee9fee23, 0x91f8bb7c7ced7cc7, 0xe3cc716666856617,
-	0x8ea77bdddd53dda6, 0x4b2eaf17175c17b8, 0x468e454747014702, 0xdc211a9e9e429e84,
-	0xc589d4caca0fca1e, 0x995a582d2db42d75, 0x79632ebfbfc6bf91, 0x1b0e3f07071c0738,
-	0x2347acadad8ead01, 0x2fb4b05a5a755aea, 0xb51bef838336836c, 0xff66b63333cc3385,
-	0xf2c65c636391633f, 0x0a04120202080210, 0x384993aaaa92aa39, 0xa8e2de7171d971af,
-	0xcf8dc6c8c807c80e, 0x7d32d119196419c8, 0x70923b4949394972, 0x9aaf5fd9d943d986,
-	0x1df931f2f2eff2c3, 0x48dba8e3e3abe34b, 0x2ab6b95b5b715be2, 0x920dbc88881a8834,
-	0xc8293e9a9a529aa4, 0xbe4c0b262698262d, 0xfa64bf3232c8328d, 0x4a7d59b0b0fab0e9,
-	0x6acff2e9e983e91b, 0x331e770f0f3c0f78, 0xa6b733d5d573d5e6, 0xba1df480803a8074,
-	0x7c6127bebec2be99, 0xde87ebcdcd13cd26, 0xe468893434d034bd, 0x75903248483d487a,
-	0x24e354ffffdbffab, 0x8ff48d7a7af57af7, 0xea3d6490907a90f4, 0x3ebe9d5f5f615fc2,
-	0xa0403d202080201d, 0xd5d00f6868bd6867, 0x7234ca1a1a681ad0, 0x2c41b7aeae82ae19,
-	0x5e757db4b4eab4c9, 0x19a8ce54544d549a, 0xe53b7f93937693ec, 0xaa442f222288220d,
-	0xe9c86364648d6407, 0x12ff2af1f1e3f1db, 0xa2e6cc7373d173bf, 0x5a24821212481290,
-	0x5d807a40401d403a, 0x2810480808200840, 0xe89b95c3c32bc356, 0x7bc5dfecec97ec33,
-	0x90ab4ddbdb4bdb96, 0x1f5fc0a1a1bea161, 0x8307918d8d0e8d1c, 0xc97ac83d3df43df5,
-	0xf1335b97976697cc, 0x0000000000000000, 0xd483f9cfcf1bcf36, 0x87566e2b2bac2b45,
-	0xb3ece17676c57697, 0xb019e68282328264, 0xa9b128d6d67fd6fe, 0x7736c31b1b6c1bd8,
-	0x5b7774b5b5eeb5c1, 0x2943beafaf86af11, 0xdfd41d6a6ab56a77, 0x0da0ea50505d50ba,
-	0x4c8a574545094512, 0x18fb38f3f3ebf3cb, 0xf060ad3030c0309d, 0x74c3c4efef9bef2b,
-	0xc37eda3f3ffc3fe5, 0x1caac75555495592, 0x1059dba2a2b2a279, 0x65c9e9eaea8fea03,
-	0xecca6a656589650f, 0x686903babad2bab9, 0x935e4a2f2fbc2f65, 0xe79d8ec0c027c04e,
-	0x81a160dede5fdebe, 0x6c38fc1c1c701ce0, 0x2ee746fdfdd3fdbb, 0x649a1f4d4d294d52,
-	0xe0397692927292e4, 0xbceafa7575c9758f, 0x1e0c360606180630, 0x9809ae8a8a128a24,
-	0x40794bb2b2f2b2f9, 0x59d185e6e6bfe663, 0x361c7e0e0e380e70, 0x633ee71f1f7c1ff8,
-	0xf7c4556262956237, 0xa3b53ad4d477d4ee, 0x324d81a8a89aa829, 0xf4315296966296c4,
-	0x3aef62f9f9c3f99b, 0xf697a3c5c533c566, 0xb14a102525942535, 0x20b2ab59597959f2,
-	0xae15d084842a8454, 0xa7e4c57272d572b7, 0xdd72ec3939e439d5, 0x6198164c4c2d4c5a,
-	0x3bbc945e5e655eca, 0x85f09f7878fd78e7, 0xd870e53838e038dd, 0x8605988c8c0a8c14,
-	0xb2bf17d1d163d1c6, 0x0b57e4a5a5aea541, 0x4dd9a1e2e2afe243, 0xf8c24e616199612f,
-	0x457b42b3b3f6b3f1, 0xa542342121842115, 0xd625089c9c4a9c94, 0x663cee1e1e781ef0,
-	0x5286614343114322, 0xfc93b1c7c73bc776, 0x2be54ffcfcd7fcb3, 0x1408240404100420,
-	0x08a2e351515951b2, 0xc72f2599995e99bc, 0xc4da226d6da96d4f, 0x391a650d0d340d68,
-	0x35e979fafacffa83, 0x84a369dfdf5bdfb6, 0x9bfca97e7ee57ed7, 0xb44819242490243d,
-	0xd776fe3b3bec3bc5, 0x3d4b9aabab96ab31, 0xd181f0cece1fce3e, 0x5522991111441188,
-	0x8903838f8f068f0c, 0x6b9c044e4e254e4a, 0x517366b7b7e6b7d1, 0x60cbe0ebeb8beb0b,
-	0xcc78c13c3cf03cfd, 0xbf1ffd81813e817c, 0xfe354094946a94d4, 0x0cf31cf7f7fbf7eb,
-	0x676f18b9b9deb9a1, 0x5f268b13134c1398, 0x9c58512c2cb02c7d, 0xb8bb05d3d36bd3d6,
-	0x5cd38ce7e7bbe76b, 0xcbdc396e6ea56e57, 0xf395aac4c437c46e, 0x0f061b03030c0318,
-	0x13acdc565645568a, 0x49885e44440d441a, 0x9efea07f7fe17fdf, 0x374f88a9a99ea921,
-	0x8254672a2aa82a4d, 0x6d6b0abbbbd6bbb1, 0xe29f87c1c123c146, 0x02a6f153535153a2,
-	0x8ba572dcdc57dcae, 0x2716530b0b2c0b58, 0xd327019d9d4e9d9c, 0xc1d82b6c6cad6c47,
-	0xf562a43131c43195, 0xb9e8f37474cd7487, 0x09f115f6f6fff6e3, 0x438c4c464605460a,
-	0x2645a5acac8aac09, 0x970fb589891e893c, 0x4428b414145014a0, 0x42dfbae1e1a3e15b,
-	0x4e2ca616165816b0, 0xd274f73a3ae83acd, 0xd0d2066969b9696f, 0x2d12410909240948,
-	0xade0d77070dd70a7, 0x54716fb6b6e2b6d9, 0xb7bd1ed0d067d0ce, 0x7ec7d6eded93ed3b,
-	0xdb85e2cccc17cc2e, 0x578468424215422a, 0xc22d2c98985a98b4, 0x0e55eda4a4aaa449,
-	0x8850752828a0285d, 0x31b8865c5c6d5cda, 0x3fed6bf8f8c7f893, 0xa411c28686228644,
-}
-
-C4 := [256]u64 {
-	0xc07830d818186018, 0x05af462623238c23, 0x7ef991b8c6c63fc6, 0x136fcdfbe8e887e8,
-	0x4ca113cb87872687, 0xa9626d11b8b8dab8, 0x0805020901010401, 0x426e9e0d4f4f214f,
-	0xadee6c9b3636d836, 0x590451ffa6a6a2a6, 0xdebdb90cd2d26fd2, 0xfb06f70ef5f5f3f5,
-	0xef80f2967979f979, 0x5fcede306f6fa16f, 0xfcef3f6d91917e91, 0xaa07a4f852525552,
-	0x27fdc04760609d60, 0x89766535bcbccabc, 0xaccd2b379b9b569b, 0x048c018a8e8e028e,
-	0x71155bd2a3a3b6a3, 0x603c186c0c0c300c, 0xff8af6847b7bf17b, 0xb5e16a803535d435,
-	0xe8693af51d1d741d, 0x5347ddb3e0e0a7e0, 0xf6acb321d7d77bd7, 0x5eed999cc2c22fc2,
-	0x6d965c432e2eb82e, 0x627a96294b4b314b, 0xa321e15dfefedffe, 0x8216aed557574157,
-	0xa8412abd15155415, 0x9fb6eee87777c177, 0xa5eb6e923737dc37, 0x7b56d79ee5e5b3e5,
-	0x8cd923139f9f469f, 0xd317fd23f0f0e7f0, 0x6a7f94204a4a354a, 0x9e95a944dada4fda,
-	0xfa25b0a258587d58, 0x06ca8fcfc9c903c9, 0x558d527c2929a429, 0x5022145a0a0a280a,
-	0xe14f7f50b1b1feb1, 0x691a5dc9a0a0baa0, 0x7fdad6146b6bb16b, 0x5cab17d985852e85,
-	0x8173673cbdbdcebd, 0xd234ba8f5d5d695d, 0x8050209010104010, 0xf303f507f4f4f7f4,
-	0x16c08bddcbcb0bcb, 0xedc67cd33e3ef83e, 0x28110a2d05051405, 0x1fe6ce7867678167,
-	0x7353d597e4e4b7e4, 0x25bb4e0227279c27, 0x3258827341411941, 0x2c9d0ba78b8b168b,
-	0x510153f6a7a7a6a7, 0xcf94fab27d7de97d, 0xdcfb374995956e95, 0x8e9fad56d8d847d8,
-	0x8b30eb70fbfbcbfb, 0x2371c1cdeeee9fee, 0xc791f8bb7c7ced7c, 0x17e3cc7166668566,
-	0xa68ea77bdddd53dd, 0xb84b2eaf17175c17, 0x02468e4547470147, 0x84dc211a9e9e429e,
-	0x1ec589d4caca0fca, 0x75995a582d2db42d, 0x9179632ebfbfc6bf, 0x381b0e3f07071c07,
-	0x012347acadad8ead, 0xea2fb4b05a5a755a, 0x6cb51bef83833683, 0x85ff66b63333cc33,
-	0x3ff2c65c63639163, 0x100a041202020802, 0x39384993aaaa92aa, 0xafa8e2de7171d971,
-	0x0ecf8dc6c8c807c8, 0xc87d32d119196419, 0x7270923b49493949, 0x869aaf5fd9d943d9,
-	0xc31df931f2f2eff2, 0x4b48dba8e3e3abe3, 0xe22ab6b95b5b715b, 0x34920dbc88881a88,
-	0xa4c8293e9a9a529a, 0x2dbe4c0b26269826, 0x8dfa64bf3232c832, 0xe94a7d59b0b0fab0,
-	0x1b6acff2e9e983e9, 0x78331e770f0f3c0f, 0xe6a6b733d5d573d5, 0x74ba1df480803a80,
-	0x997c6127bebec2be, 0x26de87ebcdcd13cd, 0xbde468893434d034, 0x7a75903248483d48,
-	0xab24e354ffffdbff, 0xf78ff48d7a7af57a, 0xf4ea3d6490907a90, 0xc23ebe9d5f5f615f,
-	0x1da0403d20208020, 0x67d5d00f6868bd68, 0xd07234ca1a1a681a, 0x192c41b7aeae82ae,
-	0xc95e757db4b4eab4, 0x9a19a8ce54544d54, 0xece53b7f93937693, 0x0daa442f22228822,
-	0x07e9c86364648d64, 0xdb12ff2af1f1e3f1, 0xbfa2e6cc7373d173, 0x905a248212124812,
-	0x3a5d807a40401d40, 0x4028104808082008, 0x56e89b95c3c32bc3, 0x337bc5dfecec97ec,
-	0x9690ab4ddbdb4bdb, 0x611f5fc0a1a1bea1, 0x1c8307918d8d0e8d, 0xf5c97ac83d3df43d,
-	0xccf1335b97976697, 0x0000000000000000, 0x36d483f9cfcf1bcf, 0x4587566e2b2bac2b,
-	0x97b3ece17676c576, 0x64b019e682823282, 0xfea9b128d6d67fd6, 0xd87736c31b1b6c1b,
-	0xc15b7774b5b5eeb5, 0x112943beafaf86af, 0x77dfd41d6a6ab56a, 0xba0da0ea50505d50,
-	0x124c8a5745450945, 0xcb18fb38f3f3ebf3, 0x9df060ad3030c030, 0x2b74c3c4efef9bef,
-	0xe5c37eda3f3ffc3f, 0x921caac755554955, 0x791059dba2a2b2a2, 0x0365c9e9eaea8fea,
-	0x0fecca6a65658965, 0xb9686903babad2ba, 0x65935e4a2f2fbc2f, 0x4ee79d8ec0c027c0,
-	0xbe81a160dede5fde, 0xe06c38fc1c1c701c, 0xbb2ee746fdfdd3fd, 0x52649a1f4d4d294d,
-	0xe4e0397692927292, 0x8fbceafa7575c975, 0x301e0c3606061806, 0x249809ae8a8a128a,
-	0xf940794bb2b2f2b2, 0x6359d185e6e6bfe6, 0x70361c7e0e0e380e, 0xf8633ee71f1f7c1f,
-	0x37f7c45562629562, 0xeea3b53ad4d477d4, 0x29324d81a8a89aa8, 0xc4f4315296966296,
-	0x9b3aef62f9f9c3f9, 0x66f697a3c5c533c5, 0x35b14a1025259425, 0xf220b2ab59597959,
-	0x54ae15d084842a84, 0xb7a7e4c57272d572, 0xd5dd72ec3939e439, 0x5a6198164c4c2d4c,
-	0xca3bbc945e5e655e, 0xe785f09f7878fd78, 0xddd870e53838e038, 0x148605988c8c0a8c,
-	0xc6b2bf17d1d163d1, 0x410b57e4a5a5aea5, 0x434dd9a1e2e2afe2, 0x2ff8c24e61619961,
-	0xf1457b42b3b3f6b3, 0x15a5423421218421, 0x94d625089c9c4a9c, 0xf0663cee1e1e781e,
-	0x2252866143431143, 0x76fc93b1c7c73bc7, 0xb32be54ffcfcd7fc, 0x2014082404041004,
-	0xb208a2e351515951, 0xbcc72f2599995e99, 0x4fc4da226d6da96d, 0x68391a650d0d340d,
-	0x8335e979fafacffa, 0xb684a369dfdf5bdf, 0xd79bfca97e7ee57e, 0x3db4481924249024,
-	0xc5d776fe3b3bec3b, 0x313d4b9aabab96ab, 0x3ed181f0cece1fce, 0x8855229911114411,
-	0x0c8903838f8f068f, 0x4a6b9c044e4e254e, 0xd1517366b7b7e6b7, 0x0b60cbe0ebeb8beb,
-	0xfdcc78c13c3cf03c, 0x7cbf1ffd81813e81, 0xd4fe354094946a94, 0xeb0cf31cf7f7fbf7,
-	0xa1676f18b9b9deb9, 0x985f268b13134c13, 0x7d9c58512c2cb02c, 0xd6b8bb05d3d36bd3,
-	0x6b5cd38ce7e7bbe7, 0x57cbdc396e6ea56e, 0x6ef395aac4c437c4, 0x180f061b03030c03,
-	0x8a13acdc56564556, 0x1a49885e44440d44, 0xdf9efea07f7fe17f, 0x21374f88a9a99ea9,
-	0x4d8254672a2aa82a, 0xb16d6b0abbbbd6bb, 0x46e29f87c1c123c1, 0xa202a6f153535153,
-	0xae8ba572dcdc57dc, 0x582716530b0b2c0b, 0x9cd327019d9d4e9d, 0x47c1d82b6c6cad6c,
-	0x95f562a43131c431, 0x87b9e8f37474cd74, 0xe309f115f6f6fff6, 0x0a438c4c46460546,
-	0x092645a5acac8aac, 0x3c970fb589891e89, 0xa04428b414145014, 0x5b42dfbae1e1a3e1,
-	0xb04e2ca616165816, 0xcdd274f73a3ae83a, 0x6fd0d2066969b969, 0x482d124109092409,
-	0xa7ade0d77070dd70, 0xd954716fb6b6e2b6, 0xceb7bd1ed0d067d0, 0x3b7ec7d6eded93ed,
-	0x2edb85e2cccc17cc, 0x2a57846842421542, 0xb4c22d2c98985a98, 0x490e55eda4a4aaa4,
-	0x5d8850752828a028, 0xda31b8865c5c6d5c, 0x933fed6bf8f8c7f8, 0x44a411c286862286,
-}
-
-C5 := [256]u64 {
-	0x18c07830d8181860, 0x2305af462623238c, 0xc67ef991b8c6c63f, 0xe8136fcdfbe8e887,
-	0x874ca113cb878726, 0xb8a9626d11b8b8da, 0x0108050209010104, 0x4f426e9e0d4f4f21,
-	0x36adee6c9b3636d8, 0xa6590451ffa6a6a2, 0xd2debdb90cd2d26f, 0xf5fb06f70ef5f5f3,
-	0x79ef80f2967979f9, 0x6f5fcede306f6fa1, 0x91fcef3f6d91917e, 0x52aa07a4f8525255,
-	0x6027fdc04760609d, 0xbc89766535bcbcca, 0x9baccd2b379b9b56, 0x8e048c018a8e8e02,
-	0xa371155bd2a3a3b6, 0x0c603c186c0c0c30, 0x7bff8af6847b7bf1, 0x35b5e16a803535d4,
-	0x1de8693af51d1d74, 0xe05347ddb3e0e0a7, 0xd7f6acb321d7d77b, 0xc25eed999cc2c22f,
-	0x2e6d965c432e2eb8, 0x4b627a96294b4b31, 0xfea321e15dfefedf, 0x578216aed5575741,
-	0x15a8412abd151554, 0x779fb6eee87777c1, 0x37a5eb6e923737dc, 0xe57b56d79ee5e5b3,
-	0x9f8cd923139f9f46, 0xf0d317fd23f0f0e7, 0x4a6a7f94204a4a35, 0xda9e95a944dada4f,
-	0x58fa25b0a258587d, 0xc906ca8fcfc9c903, 0x29558d527c2929a4, 0x0a5022145a0a0a28,
-	0xb1e14f7f50b1b1fe, 0xa0691a5dc9a0a0ba, 0x6b7fdad6146b6bb1, 0x855cab17d985852e,
-	0xbd8173673cbdbdce, 0x5dd234ba8f5d5d69, 0x1080502090101040, 0xf4f303f507f4f4f7,
-	0xcb16c08bddcbcb0b, 0x3eedc67cd33e3ef8, 0x0528110a2d050514, 0x671fe6ce78676781,
-	0xe47353d597e4e4b7, 0x2725bb4e0227279c, 0x4132588273414119, 0x8b2c9d0ba78b8b16,
-	0xa7510153f6a7a7a6, 0x7dcf94fab27d7de9, 0x95dcfb374995956e, 0xd88e9fad56d8d847,
-	0xfb8b30eb70fbfbcb, 0xee2371c1cdeeee9f, 0x7cc791f8bb7c7ced, 0x6617e3cc71666685,
-	0xdda68ea77bdddd53, 0x17b84b2eaf17175c, 0x4702468e45474701, 0x9e84dc211a9e9e42,
-	0xca1ec589d4caca0f, 0x2d75995a582d2db4, 0xbf9179632ebfbfc6, 0x07381b0e3f07071c,
-	0xad012347acadad8e, 0x5aea2fb4b05a5a75, 0x836cb51bef838336, 0x3385ff66b63333cc,
-	0x633ff2c65c636391, 0x02100a0412020208, 0xaa39384993aaaa92, 0x71afa8e2de7171d9,
-	0xc80ecf8dc6c8c807, 0x19c87d32d1191964, 0x497270923b494939, 0xd9869aaf5fd9d943,
-	0xf2c31df931f2f2ef, 0xe34b48dba8e3e3ab, 0x5be22ab6b95b5b71, 0x8834920dbc88881a,
-	0x9aa4c8293e9a9a52, 0x262dbe4c0b262698, 0x328dfa64bf3232c8, 0xb0e94a7d59b0b0fa,
-	0xe91b6acff2e9e983, 0x0f78331e770f0f3c, 0xd5e6a6b733d5d573, 0x8074ba1df480803a,
-	0xbe997c6127bebec2, 0xcd26de87ebcdcd13, 0x34bde468893434d0, 0x487a75903248483d,
-	0xffab24e354ffffdb, 0x7af78ff48d7a7af5, 0x90f4ea3d6490907a, 0x5fc23ebe9d5f5f61,
-	0x201da0403d202080, 0x6867d5d00f6868bd, 0x1ad07234ca1a1a68, 0xae192c41b7aeae82,
-	0xb4c95e757db4b4ea, 0x549a19a8ce54544d, 0x93ece53b7f939376, 0x220daa442f222288,
-	0x6407e9c86364648d, 0xf1db12ff2af1f1e3, 0x73bfa2e6cc7373d1, 0x12905a2482121248,
-	0x403a5d807a40401d, 0x0840281048080820, 0xc356e89b95c3c32b, 0xec337bc5dfecec97,
-	0xdb9690ab4ddbdb4b, 0xa1611f5fc0a1a1be, 0x8d1c8307918d8d0e, 0x3df5c97ac83d3df4,
-	0x97ccf1335b979766, 0x0000000000000000, 0xcf36d483f9cfcf1b, 0x2b4587566e2b2bac,
-	0x7697b3ece17676c5, 0x8264b019e6828232, 0xd6fea9b128d6d67f, 0x1bd87736c31b1b6c,
-	0xb5c15b7774b5b5ee, 0xaf112943beafaf86, 0x6a77dfd41d6a6ab5, 0x50ba0da0ea50505d,
-	0x45124c8a57454509, 0xf3cb18fb38f3f3eb, 0x309df060ad3030c0, 0xef2b74c3c4efef9b,
-	0x3fe5c37eda3f3ffc, 0x55921caac7555549, 0xa2791059dba2a2b2, 0xea0365c9e9eaea8f,
-	0x650fecca6a656589, 0xbab9686903babad2, 0x2f65935e4a2f2fbc, 0xc04ee79d8ec0c027,
-	0xdebe81a160dede5f, 0x1ce06c38fc1c1c70, 0xfdbb2ee746fdfdd3, 0x4d52649a1f4d4d29,
-	0x92e4e03976929272, 0x758fbceafa7575c9, 0x06301e0c36060618, 0x8a249809ae8a8a12,
-	0xb2f940794bb2b2f2, 0xe66359d185e6e6bf, 0x0e70361c7e0e0e38, 0x1ff8633ee71f1f7c,
-	0x6237f7c455626295, 0xd4eea3b53ad4d477, 0xa829324d81a8a89a, 0x96c4f43152969662,
-	0xf99b3aef62f9f9c3, 0xc566f697a3c5c533, 0x2535b14a10252594, 0x59f220b2ab595979,
-	0x8454ae15d084842a, 0x72b7a7e4c57272d5, 0x39d5dd72ec3939e4, 0x4c5a6198164c4c2d,
-	0x5eca3bbc945e5e65, 0x78e785f09f7878fd, 0x38ddd870e53838e0, 0x8c148605988c8c0a,
-	0xd1c6b2bf17d1d163, 0xa5410b57e4a5a5ae, 0xe2434dd9a1e2e2af, 0x612ff8c24e616199,
-	0xb3f1457b42b3b3f6, 0x2115a54234212184, 0x9c94d625089c9c4a, 0x1ef0663cee1e1e78,
-	0x4322528661434311, 0xc776fc93b1c7c73b, 0xfcb32be54ffcfcd7, 0x0420140824040410,
-	0x51b208a2e3515159, 0x99bcc72f2599995e, 0x6d4fc4da226d6da9, 0x0d68391a650d0d34,
-	0xfa8335e979fafacf, 0xdfb684a369dfdf5b, 0x7ed79bfca97e7ee5, 0x243db44819242490,
-	0x3bc5d776fe3b3bec, 0xab313d4b9aabab96, 0xce3ed181f0cece1f, 0x1188552299111144,
-	0x8f0c8903838f8f06, 0x4e4a6b9c044e4e25, 0xb7d1517366b7b7e6, 0xeb0b60cbe0ebeb8b,
-	0x3cfdcc78c13c3cf0, 0x817cbf1ffd81813e, 0x94d4fe354094946a, 0xf7eb0cf31cf7f7fb,
-	0xb9a1676f18b9b9de, 0x13985f268b13134c, 0x2c7d9c58512c2cb0, 0xd3d6b8bb05d3d36b,
-	0xe76b5cd38ce7e7bb, 0x6e57cbdc396e6ea5, 0xc46ef395aac4c437, 0x03180f061b03030c,
-	0x568a13acdc565645, 0x441a49885e44440d, 0x7fdf9efea07f7fe1, 0xa921374f88a9a99e,
-	0x2a4d8254672a2aa8, 0xbbb16d6b0abbbbd6, 0xc146e29f87c1c123, 0x53a202a6f1535351,
-	0xdcae8ba572dcdc57, 0x0b582716530b0b2c, 0x9d9cd327019d9d4e, 0x6c47c1d82b6c6cad,
-	0x3195f562a43131c4, 0x7487b9e8f37474cd, 0xf6e309f115f6f6ff, 0x460a438c4c464605,
-	0xac092645a5acac8a, 0x893c970fb589891e, 0x14a04428b4141450, 0xe15b42dfbae1e1a3,
-	0x16b04e2ca6161658, 0x3acdd274f73a3ae8, 0x696fd0d2066969b9, 0x09482d1241090924,
-	0x70a7ade0d77070dd, 0xb6d954716fb6b6e2, 0xd0ceb7bd1ed0d067, 0xed3b7ec7d6eded93,
-	0xcc2edb85e2cccc17, 0x422a578468424215, 0x98b4c22d2c98985a, 0xa4490e55eda4a4aa,
-	0x285d8850752828a0, 0x5cda31b8865c5c6d, 0xf8933fed6bf8f8c7, 0x8644a411c2868622,
-}
-
-C6 := [256]u64 {
-	0x6018c07830d81818, 0x8c2305af46262323, 0x3fc67ef991b8c6c6, 0x87e8136fcdfbe8e8,
-	0x26874ca113cb8787, 0xdab8a9626d11b8b8, 0x0401080502090101, 0x214f426e9e0d4f4f,
-	0xd836adee6c9b3636, 0xa2a6590451ffa6a6, 0x6fd2debdb90cd2d2, 0xf3f5fb06f70ef5f5,
-	0xf979ef80f2967979, 0xa16f5fcede306f6f, 0x7e91fcef3f6d9191, 0x5552aa07a4f85252,
-	0x9d6027fdc0476060, 0xcabc89766535bcbc, 0x569baccd2b379b9b, 0x028e048c018a8e8e,
-	0xb6a371155bd2a3a3, 0x300c603c186c0c0c, 0xf17bff8af6847b7b, 0xd435b5e16a803535,
-	0x741de8693af51d1d, 0xa7e05347ddb3e0e0, 0x7bd7f6acb321d7d7, 0x2fc25eed999cc2c2,
-	0xb82e6d965c432e2e, 0x314b627a96294b4b, 0xdffea321e15dfefe, 0x41578216aed55757,
-	0x5415a8412abd1515, 0xc1779fb6eee87777, 0xdc37a5eb6e923737, 0xb3e57b56d79ee5e5,
-	0x469f8cd923139f9f, 0xe7f0d317fd23f0f0, 0x354a6a7f94204a4a, 0x4fda9e95a944dada,
-	0x7d58fa25b0a25858, 0x03c906ca8fcfc9c9, 0xa429558d527c2929, 0x280a5022145a0a0a,
-	0xfeb1e14f7f50b1b1, 0xbaa0691a5dc9a0a0, 0xb16b7fdad6146b6b, 0x2e855cab17d98585,
-	0xcebd8173673cbdbd, 0x695dd234ba8f5d5d, 0x4010805020901010, 0xf7f4f303f507f4f4,
-	0x0bcb16c08bddcbcb, 0xf83eedc67cd33e3e, 0x140528110a2d0505, 0x81671fe6ce786767,
-	0xb7e47353d597e4e4, 0x9c2725bb4e022727, 0x1941325882734141, 0x168b2c9d0ba78b8b,
-	0xa6a7510153f6a7a7, 0xe97dcf94fab27d7d, 0x6e95dcfb37499595, 0x47d88e9fad56d8d8,
-	0xcbfb8b30eb70fbfb, 0x9fee2371c1cdeeee, 0xed7cc791f8bb7c7c, 0x856617e3cc716666,
-	0x53dda68ea77bdddd, 0x5c17b84b2eaf1717, 0x014702468e454747, 0x429e84dc211a9e9e,
-	0x0fca1ec589d4caca, 0xb42d75995a582d2d, 0xc6bf9179632ebfbf, 0x1c07381b0e3f0707,
-	0x8ead012347acadad, 0x755aea2fb4b05a5a, 0x36836cb51bef8383, 0xcc3385ff66b63333,
-	0x91633ff2c65c6363, 0x0802100a04120202, 0x92aa39384993aaaa, 0xd971afa8e2de7171,
-	0x07c80ecf8dc6c8c8, 0x6419c87d32d11919, 0x39497270923b4949, 0x43d9869aaf5fd9d9,
-	0xeff2c31df931f2f2, 0xabe34b48dba8e3e3, 0x715be22ab6b95b5b, 0x1a8834920dbc8888,
-	0x529aa4c8293e9a9a, 0x98262dbe4c0b2626, 0xc8328dfa64bf3232, 0xfab0e94a7d59b0b0,
-	0x83e91b6acff2e9e9, 0x3c0f78331e770f0f, 0x73d5e6a6b733d5d5, 0x3a8074ba1df48080,
-	0xc2be997c6127bebe, 0x13cd26de87ebcdcd, 0xd034bde468893434, 0x3d487a7590324848,
-	0xdbffab24e354ffff, 0xf57af78ff48d7a7a, 0x7a90f4ea3d649090, 0x615fc23ebe9d5f5f,
-	0x80201da0403d2020, 0xbd6867d5d00f6868, 0x681ad07234ca1a1a, 0x82ae192c41b7aeae,
-	0xeab4c95e757db4b4, 0x4d549a19a8ce5454, 0x7693ece53b7f9393, 0x88220daa442f2222,
-	0x8d6407e9c8636464, 0xe3f1db12ff2af1f1, 0xd173bfa2e6cc7373, 0x4812905a24821212,
-	0x1d403a5d807a4040, 0x2008402810480808, 0x2bc356e89b95c3c3, 0x97ec337bc5dfecec,
-	0x4bdb9690ab4ddbdb, 0xbea1611f5fc0a1a1, 0x0e8d1c8307918d8d, 0xf43df5c97ac83d3d,
-	0x6697ccf1335b9797, 0x0000000000000000, 0x1bcf36d483f9cfcf, 0xac2b4587566e2b2b,
-	0xc57697b3ece17676, 0x328264b019e68282, 0x7fd6fea9b128d6d6, 0x6c1bd87736c31b1b,
-	0xeeb5c15b7774b5b5, 0x86af112943beafaf, 0xb56a77dfd41d6a6a, 0x5d50ba0da0ea5050,
-	0x0945124c8a574545, 0xebf3cb18fb38f3f3, 0xc0309df060ad3030, 0x9bef2b74c3c4efef,
-	0xfc3fe5c37eda3f3f, 0x4955921caac75555, 0xb2a2791059dba2a2, 0x8fea0365c9e9eaea,
-	0x89650fecca6a6565, 0xd2bab9686903baba, 0xbc2f65935e4a2f2f, 0x27c04ee79d8ec0c0,
-	0x5fdebe81a160dede, 0x701ce06c38fc1c1c, 0xd3fdbb2ee746fdfd, 0x294d52649a1f4d4d,
-	0x7292e4e039769292, 0xc9758fbceafa7575, 0x1806301e0c360606, 0x128a249809ae8a8a,
-	0xf2b2f940794bb2b2, 0xbfe66359d185e6e6, 0x380e70361c7e0e0e, 0x7c1ff8633ee71f1f,
-	0x956237f7c4556262, 0x77d4eea3b53ad4d4, 0x9aa829324d81a8a8, 0x6296c4f431529696,
-	0xc3f99b3aef62f9f9, 0x33c566f697a3c5c5, 0x942535b14a102525, 0x7959f220b2ab5959,
-	0x2a8454ae15d08484, 0xd572b7a7e4c57272, 0xe439d5dd72ec3939, 0x2d4c5a6198164c4c,
-	0x655eca3bbc945e5e, 0xfd78e785f09f7878, 0xe038ddd870e53838, 0x0a8c148605988c8c,
-	0x63d1c6b2bf17d1d1, 0xaea5410b57e4a5a5, 0xafe2434dd9a1e2e2, 0x99612ff8c24e6161,
-	0xf6b3f1457b42b3b3, 0x842115a542342121, 0x4a9c94d625089c9c, 0x781ef0663cee1e1e,
-	0x1143225286614343, 0x3bc776fc93b1c7c7, 0xd7fcb32be54ffcfc, 0x1004201408240404,
-	0x5951b208a2e35151, 0x5e99bcc72f259999, 0xa96d4fc4da226d6d, 0x340d68391a650d0d,
-	0xcffa8335e979fafa, 0x5bdfb684a369dfdf, 0xe57ed79bfca97e7e, 0x90243db448192424,
-	0xec3bc5d776fe3b3b, 0x96ab313d4b9aabab, 0x1fce3ed181f0cece, 0x4411885522991111,
-	0x068f0c8903838f8f, 0x254e4a6b9c044e4e, 0xe6b7d1517366b7b7, 0x8beb0b60cbe0ebeb,
-	0xf03cfdcc78c13c3c, 0x3e817cbf1ffd8181, 0x6a94d4fe35409494, 0xfbf7eb0cf31cf7f7,
-	0xdeb9a1676f18b9b9, 0x4c13985f268b1313, 0xb02c7d9c58512c2c, 0x6bd3d6b8bb05d3d3,
-	0xbbe76b5cd38ce7e7, 0xa56e57cbdc396e6e, 0x37c46ef395aac4c4, 0x0c03180f061b0303,
-	0x45568a13acdc5656, 0x0d441a49885e4444, 0xe17fdf9efea07f7f, 0x9ea921374f88a9a9,
-	0xa82a4d8254672a2a, 0xd6bbb16d6b0abbbb, 0x23c146e29f87c1c1, 0x5153a202a6f15353,
-	0x57dcae8ba572dcdc, 0x2c0b582716530b0b, 0x4e9d9cd327019d9d, 0xad6c47c1d82b6c6c,
-	0xc43195f562a43131, 0xcd7487b9e8f37474, 0xfff6e309f115f6f6, 0x05460a438c4c4646,
-	0x8aac092645a5acac, 0x1e893c970fb58989, 0x5014a04428b41414, 0xa3e15b42dfbae1e1,
-	0x5816b04e2ca61616, 0xe83acdd274f73a3a, 0xb9696fd0d2066969, 0x2409482d12410909,
-	0xdd70a7ade0d77070, 0xe2b6d954716fb6b6, 0x67d0ceb7bd1ed0d0, 0x93ed3b7ec7d6eded,
-	0x17cc2edb85e2cccc, 0x15422a5784684242, 0x5a98b4c22d2c9898, 0xaaa4490e55eda4a4,
-	0xa0285d8850752828, 0x6d5cda31b8865c5c, 0xc7f8933fed6bf8f8, 0x228644a411c28686,
-}
-
-C7 := [256]u64 {
-	0x186018c07830d818, 0x238c2305af462623, 0xc63fc67ef991b8c6, 0xe887e8136fcdfbe8,
-	0x8726874ca113cb87, 0xb8dab8a9626d11b8, 0x0104010805020901, 0x4f214f426e9e0d4f,
-	0x36d836adee6c9b36, 0xa6a2a6590451ffa6, 0xd26fd2debdb90cd2, 0xf5f3f5fb06f70ef5,
-	0x79f979ef80f29679, 0x6fa16f5fcede306f, 0x917e91fcef3f6d91, 0x525552aa07a4f852,
-	0x609d6027fdc04760, 0xbccabc89766535bc, 0x9b569baccd2b379b, 0x8e028e048c018a8e,
-	0xa3b6a371155bd2a3, 0x0c300c603c186c0c, 0x7bf17bff8af6847b, 0x35d435b5e16a8035,
-	0x1d741de8693af51d, 0xe0a7e05347ddb3e0, 0xd77bd7f6acb321d7, 0xc22fc25eed999cc2,
-	0x2eb82e6d965c432e, 0x4b314b627a96294b, 0xfedffea321e15dfe, 0x5741578216aed557,
-	0x155415a8412abd15, 0x77c1779fb6eee877, 0x37dc37a5eb6e9237, 0xe5b3e57b56d79ee5,
-	0x9f469f8cd923139f, 0xf0e7f0d317fd23f0, 0x4a354a6a7f94204a, 0xda4fda9e95a944da,
-	0x587d58fa25b0a258, 0xc903c906ca8fcfc9, 0x29a429558d527c29, 0x0a280a5022145a0a,
-	0xb1feb1e14f7f50b1, 0xa0baa0691a5dc9a0, 0x6bb16b7fdad6146b, 0x852e855cab17d985,
-	0xbdcebd8173673cbd, 0x5d695dd234ba8f5d, 0x1040108050209010, 0xf4f7f4f303f507f4,
-	0xcb0bcb16c08bddcb, 0x3ef83eedc67cd33e, 0x05140528110a2d05, 0x6781671fe6ce7867,
-	0xe4b7e47353d597e4, 0x279c2725bb4e0227, 0x4119413258827341, 0x8b168b2c9d0ba78b,
-	0xa7a6a7510153f6a7, 0x7de97dcf94fab27d, 0x956e95dcfb374995, 0xd847d88e9fad56d8,
-	0xfbcbfb8b30eb70fb, 0xee9fee2371c1cdee, 0x7ced7cc791f8bb7c, 0x66856617e3cc7166,
-	0xdd53dda68ea77bdd, 0x175c17b84b2eaf17, 0x47014702468e4547, 0x9e429e84dc211a9e,
-	0xca0fca1ec589d4ca, 0x2db42d75995a582d, 0xbfc6bf9179632ebf, 0x071c07381b0e3f07,
-	0xad8ead012347acad, 0x5a755aea2fb4b05a, 0x8336836cb51bef83, 0x33cc3385ff66b633,
-	0x6391633ff2c65c63, 0x020802100a041202, 0xaa92aa39384993aa, 0x71d971afa8e2de71,
-	0xc807c80ecf8dc6c8, 0x196419c87d32d119, 0x4939497270923b49, 0xd943d9869aaf5fd9,
-	0xf2eff2c31df931f2, 0xe3abe34b48dba8e3, 0x5b715be22ab6b95b, 0x881a8834920dbc88,
-	0x9a529aa4c8293e9a, 0x2698262dbe4c0b26, 0x32c8328dfa64bf32, 0xb0fab0e94a7d59b0,
-	0xe983e91b6acff2e9, 0x0f3c0f78331e770f, 0xd573d5e6a6b733d5, 0x803a8074ba1df480,
-	0xbec2be997c6127be, 0xcd13cd26de87ebcd, 0x34d034bde4688934, 0x483d487a75903248,
-	0xffdbffab24e354ff, 0x7af57af78ff48d7a, 0x907a90f4ea3d6490, 0x5f615fc23ebe9d5f,
-	0x2080201da0403d20, 0x68bd6867d5d00f68, 0x1a681ad07234ca1a, 0xae82ae192c41b7ae,
-	0xb4eab4c95e757db4, 0x544d549a19a8ce54, 0x937693ece53b7f93, 0x2288220daa442f22,
-	0x648d6407e9c86364, 0xf1e3f1db12ff2af1, 0x73d173bfa2e6cc73, 0x124812905a248212,
-	0x401d403a5d807a40, 0x0820084028104808, 0xc32bc356e89b95c3, 0xec97ec337bc5dfec,
-	0xdb4bdb9690ab4ddb, 0xa1bea1611f5fc0a1, 0x8d0e8d1c8307918d, 0x3df43df5c97ac83d,
-	0x976697ccf1335b97, 0x0000000000000000, 0xcf1bcf36d483f9cf, 0x2bac2b4587566e2b,
-	0x76c57697b3ece176, 0x82328264b019e682, 0xd67fd6fea9b128d6, 0x1b6c1bd87736c31b,
-	0xb5eeb5c15b7774b5, 0xaf86af112943beaf, 0x6ab56a77dfd41d6a, 0x505d50ba0da0ea50,
-	0x450945124c8a5745, 0xf3ebf3cb18fb38f3, 0x30c0309df060ad30, 0xef9bef2b74c3c4ef,
-	0x3ffc3fe5c37eda3f, 0x554955921caac755, 0xa2b2a2791059dba2, 0xea8fea0365c9e9ea,
-	0x6589650fecca6a65, 0xbad2bab9686903ba, 0x2fbc2f65935e4a2f, 0xc027c04ee79d8ec0,
-	0xde5fdebe81a160de, 0x1c701ce06c38fc1c, 0xfdd3fdbb2ee746fd, 0x4d294d52649a1f4d,
-	0x927292e4e0397692, 0x75c9758fbceafa75, 0x061806301e0c3606, 0x8a128a249809ae8a,
-	0xb2f2b2f940794bb2, 0xe6bfe66359d185e6, 0x0e380e70361c7e0e, 0x1f7c1ff8633ee71f,
-	0x62956237f7c45562, 0xd477d4eea3b53ad4, 0xa89aa829324d81a8, 0x966296c4f4315296,
-	0xf9c3f99b3aef62f9, 0xc533c566f697a3c5, 0x25942535b14a1025, 0x597959f220b2ab59,
-	0x842a8454ae15d084, 0x72d572b7a7e4c572, 0x39e439d5dd72ec39, 0x4c2d4c5a6198164c,
-	0x5e655eca3bbc945e, 0x78fd78e785f09f78, 0x38e038ddd870e538, 0x8c0a8c148605988c,
-	0xd163d1c6b2bf17d1, 0xa5aea5410b57e4a5, 0xe2afe2434dd9a1e2, 0x6199612ff8c24e61,
-	0xb3f6b3f1457b42b3, 0x21842115a5423421, 0x9c4a9c94d625089c, 0x1e781ef0663cee1e,
-	0x4311432252866143, 0xc73bc776fc93b1c7, 0xfcd7fcb32be54ffc, 0x0410042014082404,
-	0x515951b208a2e351, 0x995e99bcc72f2599, 0x6da96d4fc4da226d, 0x0d340d68391a650d,
-	0xfacffa8335e979fa, 0xdf5bdfb684a369df, 0x7ee57ed79bfca97e, 0x2490243db4481924,
-	0x3bec3bc5d776fe3b, 0xab96ab313d4b9aab, 0xce1fce3ed181f0ce, 0x1144118855229911,
-	0x8f068f0c8903838f, 0x4e254e4a6b9c044e, 0xb7e6b7d1517366b7, 0xeb8beb0b60cbe0eb,
-	0x3cf03cfdcc78c13c, 0x813e817cbf1ffd81, 0x946a94d4fe354094, 0xf7fbf7eb0cf31cf7,
-	0xb9deb9a1676f18b9, 0x134c13985f268b13, 0x2cb02c7d9c58512c, 0xd36bd3d6b8bb05d3,
-	0xe7bbe76b5cd38ce7, 0x6ea56e57cbdc396e, 0xc437c46ef395aac4, 0x030c03180f061b03,
-	0x5645568a13acdc56, 0x440d441a49885e44, 0x7fe17fdf9efea07f, 0xa99ea921374f88a9,
-	0x2aa82a4d8254672a, 0xbbd6bbb16d6b0abb, 0xc123c146e29f87c1, 0x535153a202a6f153,
-	0xdc57dcae8ba572dc, 0x0b2c0b582716530b, 0x9d4e9d9cd327019d, 0x6cad6c47c1d82b6c,
-	0x31c43195f562a431, 0x74cd7487b9e8f374, 0xf6fff6e309f115f6, 0x4605460a438c4c46,
-	0xac8aac092645a5ac, 0x891e893c970fb589, 0x145014a04428b414, 0xe1a3e15b42dfbae1,
-	0x165816b04e2ca616, 0x3ae83acdd274f73a, 0x69b9696fd0d20669, 0x092409482d124109,
-	0x70dd70a7ade0d770, 0xb6e2b6d954716fb6, 0xd067d0ceb7bd1ed0, 0xed93ed3b7ec7d6ed,
-	0xcc17cc2edb85e2cc, 0x4215422a57846842, 0x985a98b4c22d2c98, 0xa4aaa4490e55eda4,
-	0x28a0285d88507528, 0x5c6d5cda31b8865c, 0xf8c7f8933fed6bf8, 0x86228644a411c286,
-}
-
-RC := [ROUNDS + 1]u64 {
-	0x0000000000000000,
-	0x1823c6e887b8014f,
-	0x36a6d2f5796f9152,
-	0x60bc9b8ea30c7b35,
-	0x1de0d7c22e4bfe57,
-	0x157737e59ff04ada,
-	0x58c9290ab1a06b85,
-	0xbd5d10f4cb3e0567,
-	0xe427418ba77d95d8,
-	0xfbee7c66dd17479e,
-	0xca2dbf07ad5a8333,
-}
-
-transform :: proc (ctx: ^Whirlpool_Context) {
-	K, block, state, L: [8]u64
-
-	for i := 0; i < 8; i += 1 {block[i] = util.U64_BE(ctx.buffer[8 * i:])}
-
-	for i := 0; i < 8; i += 1 {
-		K[i] = ctx.hash[i]
-		state[i] = block[i] ~ K[i]
-	}
-
-	for r := 1; r <= ROUNDS; r += 1 {
-		for i := 0; i < 8; i += 1 {
-			L[i] = C0[byte(K[i % 8] >> 56)] ~
-				C1[byte(K[(i + 7) % 8] >> 48)] ~
-				C2[byte(K[(i + 6) % 8] >> 40)] ~
-				C3[byte(K[(i + 5) % 8] >> 32)] ~
-				C4[byte(K[(i + 4) % 8] >> 24)] ~
-				C5[byte(K[(i + 3) % 8] >> 16)] ~
-				C6[byte(K[(i + 2) % 8] >> 8)] ~
-				C7[byte(K[(i + 1) % 8])]
-		}
-		L[0] ~= RC[r]
-
-		for i := 0; i < 8; i += 1 {K[i] = L[i]}
-
-		for i := 0; i < 8; i += 1 {
-			L[i] = C0[byte(state[i % 8] >> 56)] ~
-				C1[byte(state[(i + 7) % 8] >> 48)] ~
-				C2[byte(state[(i + 6) % 8] >> 40)] ~
-				C3[byte(state[(i + 5) % 8] >> 32)] ~
-				C4[byte(state[(i + 4) % 8] >> 24)] ~
-				C5[byte(state[(i + 3) % 8] >> 16)] ~
-				C6[byte(state[(i + 2) % 8] >> 8)] ~
-				C7[byte(state[(i + 1) % 8])] ~
-				K[i % 8]
-		}
-		for i := 0; i < 8; i += 1 {state[i] = L[i]}
-	}
-	for i := 0; i < 8; i += 1 {ctx.hash[i] ~= state[i] ~ block[i]}
-}
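
A note on the tables this commit deletes: every entry of C3 above is the matching C2 entry rotated right by 8 bits, and the same relation links each table to the next, so C1 through C7 are pure byte-rotations of C0 (the round constants RC likewise pack successive S-box bytes in the reference design). A minimal sketch in Odin of deriving the seven auxiliary tables from C0 at startup instead of storing all eight verbatim; `derive_whirlpool_tables` is a hypothetical helper, not part of the original file:

	derive_whirlpool_tables :: proc(C: ^[8][256]u64) {
		// C[0] is the base table; each later table is the previous one
		// rotated right by one byte, i.e. C[k][x] == ror(C[0][x], 8*k).
		for k in 1 ..< 8 {
			for x in 0 ..< 256 {
				v := C[k - 1][x]
				C[k][x] = (v >> 8) | (v << 56)
			}
		}
	}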

+ 16 - 16
core/os/file_windows.odin

@@ -149,7 +149,7 @@ read_console :: proc(handle: win32.HANDLE, b: []byte) -> (n: int, err: Errno) {
 	return
 }
 
-read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
+read :: proc(fd: Handle, data: []byte) -> (total_read: int, err: Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}
@@ -158,32 +158,32 @@ read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 	
 	m: u32
 	is_console := win32.GetConsoleMode(handle, &m)
-
-	single_read_length: win32.DWORD
-	total_read: int
 	length := len(data)
 
 	// NOTE(Jeroen): `length` can't be casted to win32.DWORD here because it'll overflow if > 4 GiB and return 0 if exactly that.
 	to_read := min(i64(length), MAX_RW)
 
-	e: win32.BOOL
 	if is_console {
-		n, err := read_console(handle, data[total_read:][:to_read])
-		total_read += n
+		total_read, err = read_console(handle, data[total_read:][:to_read])
 		if err != 0 {
-			return int(total_read), err
+			return total_read, err
 		}
 	} else {
 		// NOTE(Jeroen): So we cast it here *after* we've ensured that `to_read` is at most MAX_RW (1 GiB)
-		e = win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &single_read_length, nil)
-	}
-	if single_read_length <= 0 || !e {
-		err := Errno(win32.GetLastError())
-		return int(total_read), err
+		bytes_read: win32.DWORD
+		if e := win32.ReadFile(handle, &data[total_read], win32.DWORD(to_read), &bytes_read, nil); e {
+			// Successful read can mean two things, including EOF, see:
+			// https://learn.microsoft.com/en-us/windows/win32/fileio/testing-for-the-end-of-a-file
+			if bytes_read == 0 {
+				return 0, ERROR_HANDLE_EOF
+			} else {
+				return int(bytes_read), ERROR_NONE
+			}
+		} else {
+			return 0, Errno(win32.GetLastError())
+		}
 	}
-	total_read += int(single_read_length)
-	
-	return int(total_read), ERROR_NONE
+	return total_read, ERROR_NONE
 }
 
 seek :: proc(fd: Handle, offset: i64, whence: int) -> (i64, Errno) {
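
With this change, read reports end-of-file explicitly via ERROR_HANDLE_EOF instead of returning a zero-byte success, and each call performs at most one ReadFile of up to MAX_RW bytes. A hedged Odin sketch of a read loop under that contract; the proc name, buffer size, and [dynamic]byte accumulator are illustrative only, and ERROR_HANDLE_EOF is a Windows-only constant:

	import "core:os"

	drain_file :: proc(fd: os.Handle) -> (data: [dynamic]byte) {
		buf: [4096]byte
		for {
			n, err := os.read(fd, buf[:])
			if err == os.ERROR_HANDLE_EOF {
				return // explicit EOF: everything has been read
			}
			if err != os.ERROR_NONE {
				return // genuine failure; data holds what was read so far
			}
			append(&data, ..buf[:n])
		}
	}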

+ 6 - 3
core/os/stream.odin

@@ -27,9 +27,7 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
 	case .Read:
 		n_int, os_err = read(fd, p)
 		n = i64(n_int)
-		if os_err != 0 {
-			err = .Unknown
-		}
+
 	case .Read_At:
 		when !(ODIN_OS == .FreeBSD || ODIN_OS == .OpenBSD) {
 			n_int, os_err = read_at(fd, p, offset)
@@ -57,6 +55,11 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
 		}
 	}
 	if err == nil && os_err != 0 {
+		when ODIN_OS == .Windows {
+			if os_err == ERROR_HANDLE_EOF {
+				return n, .EOF
+			}
+		}
 		err = .Unknown
 	}
 	return
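
Together with the file_windows.odin change above, the stream layer can now distinguish end-of-file from real failures: an os-level ERROR_HANDLE_EOF becomes io's .EOF, while everything else still collapses to .Unknown. A brief sketch of the resulting caller-visible behavior, assuming this package's stream_from_handle wrapper:

	import "core:io"
	import "core:os"

	read_all_via_stream :: proc(fd: os.Handle) {
		s := os.stream_from_handle(fd)
		buf: [512]byte
		for {
			n, err := io.read(s, buf[:])
			if err == .EOF {
				break // clean end of stream (was .Unknown on Windows before)
			}
			if err != nil {
				break // any other stream error
			}
			_ = buf[:n] // consume the chunk here
		}
	}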

+ 3 - 29
examples/all/all_main.odin

@@ -23,31 +23,18 @@ import list             "core:container/intrusive/list"
 import topological_sort "core:container/topological_sort"
 
 import crypto           "core:crypto"
-import blake            "core:crypto/blake"
 import blake2b          "core:crypto/blake2b"
 import blake2s          "core:crypto/blake2s"
 import chacha20         "core:crypto/chacha20"
 import chacha20poly1305 "core:crypto/chacha20poly1305"
-import gost             "core:crypto/gost"
-import groestl          "core:crypto/groestl"
-import haval            "core:crypto/haval"
-import jh               "core:crypto/jh"
-import keccak           "core:crypto/keccak"
-import md2              "core:crypto/md2"
-import md4              "core:crypto/md4"
-import md5              "core:crypto/md5"
+import keccak           "core:crypto/legacy/keccak"
+import md5              "core:crypto/legacy/md5"
+import sha1             "core:crypto/legacy/sha1"
 import poly1305         "core:crypto/poly1305"
-import ripemd           "core:crypto/ripemd"
-import sha1             "core:crypto/sha1"
 import sha2             "core:crypto/sha2"
 import sha3             "core:crypto/sha3"
 import shake            "core:crypto/shake"
 import sm3              "core:crypto/sm3"
-import streebog         "core:crypto/streebog"
-import tiger            "core:crypto/tiger"
-import tiger2           "core:crypto/tiger2"
-import crypto_util      "core:crypto/util"
-import whirlpool        "core:crypto/whirlpool"
 import x25519           "core:crypto/x25519"
 
 import pe               "core:debug/pe"
@@ -150,31 +137,18 @@ _ :: lru
 _ :: list
 _ :: topological_sort
 _ :: crypto
-_ :: blake
 _ :: blake2b
 _ :: blake2s
 _ :: chacha20
 _ :: chacha20poly1305
-_ :: gost
-_ :: groestl
-_ :: haval
-_ :: jh
 _ :: keccak
-_ :: md2
-_ :: md4
 _ :: md5
 _ :: poly1305
-_ :: ripemd
 _ :: sha1
 _ :: sha2
 _ :: sha3
 _ :: shake
 _ :: sm3
-_ :: streebog
-_ :: tiger
-_ :: tiger2
-_ :: crypto_util
-_ :: whirlpool
 _ :: x25519
 _ :: pe
 _ :: dynlib

+ 3 - 17
examples/all/all_vendor.odin

@@ -2,21 +2,14 @@ package all
 
 import botan_bindings "vendor:botan/bindings"
 import botan_blake2b  "vendor:botan/blake2b"
-import gost           "vendor:botan/gost"
-import keccak         "vendor:botan/keccak"
-import md4            "vendor:botan/md4"
-import md5            "vendor:botan/md5"
-import ripemd         "vendor:botan/ripemd"
-import sha1           "vendor:botan/sha1"
+import keccak         "vendor:botan/legacy/keccak"
+import md5            "vendor:botan/legacy/md5"
+import sha1           "vendor:botan/legacy/sha1"
 import sha2           "vendor:botan/sha2"
 import sha3           "vendor:botan/sha3"
 import shake          "vendor:botan/shake"
 import siphash        "vendor:botan/siphash"
-import skein512       "vendor:botan/skein512"
 import sm3            "vendor:botan/sm3"
-import streebog       "vendor:botan/streebog"
-import tiger          "vendor:botan/tiger"
-import whirlpool      "vendor:botan/whirlpool"
 
 import cgltf      "vendor:cgltf"
 // import commonmark "vendor:commonmark"
@@ -48,21 +41,14 @@ import fontstash "vendor:fontstash"
 
 _ :: botan_bindings
 _ :: botan_blake2b
-_ :: gost
 _ :: keccak
-_ :: md4
 _ :: md5
-_ :: ripemd
 _ :: sha1
 _ :: sha2
 _ :: sha3
 _ :: shake
 _ :: siphash
-_ :: skein512
 _ :: sm3
-_ :: streebog
-_ :: tiger
-_ :: whirlpool
 
 
 _ :: cgltf

+ 1 - 1
src/string.cpp

@@ -205,7 +205,7 @@ gb_internal gb_inline isize string_extension_position(String const &str) {
 	isize dot_pos = -1;
 	isize i = str.len;
 	while (i --> 0) {
-		if (str[i] == GB_PATH_SEPARATOR)
+		if (str[i] == '\\' || str[i] == '/')
 			break;
 		if (str[i] == '.') {
 			dot_pos = i;

+ 19 - 675
tests/core/crypto/test_core_crypto.odin

@@ -16,28 +16,16 @@ import "core:testing"
 import "core:fmt"
 import "core:strings"
 
-import "core:crypto/md2"
-import "core:crypto/md4"
-import "core:crypto/md5"
-import "core:crypto/sha1"
 import "core:crypto/sha2"
 import "core:crypto/sha3"
-import "core:crypto/keccak"
 import "core:crypto/shake"
-import "core:crypto/whirlpool"
-import "core:crypto/ripemd"
-import "core:crypto/blake"
 import "core:crypto/blake2b"
 import "core:crypto/blake2s"
-import "core:crypto/tiger"
-import "core:crypto/tiger2"
-import "core:crypto/gost"
-import "core:crypto/streebog"
 import "core:crypto/sm3"
-import "core:crypto/jh"
-import "core:crypto/groestl"
-import "core:crypto/haval"
 import "core:crypto/siphash"
+import "core:crypto/legacy/keccak"
+import "core:crypto/legacy/md5"
+import "core:crypto/legacy/sha1"
 import "core:os"
 
 TEST_count := 0
@@ -63,14 +51,13 @@
 
 main :: proc() {
 	t := testing.T{}
-	test_md2(&t)
-	test_md4(&t)
 	test_md5(&t)
 	test_sha1(&t)
 	test_sha224(&t)
 	test_sha256(&t)
 	test_sha384(&t)
 	test_sha512(&t)
+	test_sha512_256(&t)
 	test_sha3_224(&t)
 	test_sha3_256(&t)
 	test_sha3_384(&t)
@@ -81,40 +68,9 @@ main :: proc() {
 	test_keccak_256(&t)
 	test_keccak_384(&t)
 	test_keccak_512(&t)
-	test_whirlpool(&t)
-	test_gost(&t)
-	test_streebog_256(&t)
-	test_streebog_512(&t)
-	test_blake_224(&t)
-	test_blake_256(&t)
-	test_blake_384(&t)
-	test_blake_512(&t)
 	test_blake2b(&t)
 	test_blake2s(&t)
-	test_ripemd_128(&t)
-	test_ripemd_160(&t)
-	test_ripemd_256(&t)
-	test_ripemd_320(&t)
-	test_tiger_128(&t)
-	test_tiger_160(&t)
-	test_tiger_192(&t)
-	test_tiger2_128(&t)
-	test_tiger2_160(&t)
-	test_tiger2_192(&t)
 	test_sm3(&t)
-	test_jh_224(&t)
-	test_jh_256(&t)
-	test_jh_384(&t)
-	test_jh_512(&t)
-	test_groestl_224(&t)
-	test_groestl_256(&t)
-	test_groestl_384(&t)
-	test_groestl_512(&t)
-	test_haval_128(&t)
-	test_haval_160(&t)
-	test_haval_192(&t)
-	test_haval_224(&t)
-	test_haval_256(&t)
 	test_siphash_2_4(&t)
 
 	// "modern" crypto tests
@@ -147,44 +103,6 @@ hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string
 	return string(buf)
 }
 
-@(test)
-test_md2 :: proc(t: ^testing.T) {
-	// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1319
-	test_vectors := [?]TestHash {
-		TestHash{"8350e5a3e24c153df2275c9f80692773", ""},
-		TestHash{"32ec01ec4a6dac72c0ab96fb34c0b5d1", "a"},
-		TestHash{"da853b0d3f88d99b30283a69e6ded6bb", "abc"},
-		TestHash{"ab4f496bfb2a530b219ff33031fe06b0", "message digest"},
-		TestHash{"4e8ddff3650292ab5a4108c3aa47940b", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"da33def2a42df13975352846c30338cd", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"d5976f79d83d3a0dc9806c3c66f3efd8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := md2.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_md4 :: proc(t: ^testing.T) {
-	// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1320
-	test_vectors := [?]TestHash {
-		TestHash{"31d6cfe0d16ae931b73c59d7e0c089c0", ""},
-		TestHash{"bde52cb31de33e46245e05fbdbd6fb24", "a"},
-		TestHash{"a448017aaf21d8525fc10ae87aa6729d", "abc"},
-		TestHash{"d9130a8164549fe818874806e1c7014b", "message digest"},
-		TestHash{"d79e1c308aa5bbcdeea8ed63df412da9", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"043f8582f241db351ce627e153e7f0e4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"e33b4ddc9c38f2199c3e7b164fcc0536", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := md4.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
 @(test)
 test_md5 :: proc(t: ^testing.T) {
 	// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
@@ -301,6 +219,21 @@ test_sha512 :: proc(t: ^testing.T) {
 	}
 }
 
+@(test)
+test_sha512_256 :: proc(t: ^testing.T) {
+	// Test vectors from
+	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+	test_vectors := [?]TestHash {
+		TestHash{"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23", "abc"},
+		TestHash{"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
+	}
+	for v, _ in test_vectors {
+		computed     := sha2.hash_512_256(v.str)
+		computed_str := hex_string(computed[:])
+		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
+	}
+}
+
 @(test)
 test_sha3_224 :: proc(t: ^testing.T) {
 	// Test vectors from
@@ -481,136 +414,6 @@ test_keccak_512 :: proc(t: ^testing.T) {
 	}
 }
 
-@(test)
-test_whirlpool :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html
-	test_vectors := [?]TestHash {
-		TestHash{"19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3", ""},
-		TestHash{"8aca2602792aec6f11a67206531fb7d7f0dff59413145e6973c45001d0087b42d11bc645413aeff63a42391a39145a591a92200d560195e53b478584fdae231a", "a"},
-		TestHash{"33e24e6cbebf168016942df8a7174048f9cebc45cbd829c3b94b401a498acb11c5abcca7f2a1238aaf534371e87a4e4b19758965d5a35a7cad87cf5517043d97", "ab"},
-		TestHash{"4e2448a4c6f486bb16b6562c73b4020bf3043e3a731bce721ae1b303d97e6d4c7181eebdb6c57e277d0e34957114cbd6c797fc9d95d8b582d225292076d4eef5", "abc"},
-		TestHash{"bda164f0b930c43a1bacb5df880b205d15ac847add35145bf25d991ae74f0b72b1ac794f8aacda5fcb3c47038c954742b1857b5856519de4d1e54bfa2fa4eac5", "abcd"},
-		TestHash{"5d745e26ccb20fe655d39c9e7f69455758fbae541cb892b3581e4869244ab35b4fd6078f5d28b1f1a217452a67d9801033d92724a221255a5e377fe9e9e5f0b2", "abcde"},
-		TestHash{"a73e425459567308ba5f9eb2ae23570d0d0575eb1357ecf6ac88d4e0358b0ac3ea2371261f5d4c070211784b525911b9eec0ad968429bb7c7891d341cff4e811", "abcdef"},
-		TestHash{"08b388f68fd3eb51906ac3d3c699b8e9c3ac65d7ceb49d2e34f8a482cbc3082bc401cead90e85a97b8647c948bf35e448740b79659f3bee42145f0bd653d1f25", "abcdefg"},
-		TestHash{"1f1a84d30612820243afe2022712f9dac6d07c4c8bb41b40eacab0184c8d82275da5bcadbb35c7ca1960ff21c90acbae8c14e48d9309e4819027900e882c7ad9", "abcdefgh"},
-		TestHash{"11882bc9a31ac1cf1c41dcd9fd6fdd3ccdb9b017fc7f4582680134f314d7bb49af4c71f5a920bc0a6a3c1ff9a00021bf361d9867fe636b0bc1da1552e4237de4", "abcdefghi"},
-		TestHash{"717163de24809ffcf7ff6d5aba72b8d67c2129721953c252a4ddfb107614be857cbd76a9d5927de14633d6bdc9ddf335160b919db5c6f12cb2e6549181912eef", "abcdefghij"},
-		TestHash{"b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c", "The quick brown fox jumps over the lazy eog"},
-	}
-	for v, _ in test_vectors {
-		computed     := whirlpool.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_gost :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"981e5f3ca30c841487830f84fb433e13ac1101569b9c13584ac483234cd656c0", ""},
-		TestHash{"e74c52dd282183bf37af0079c9f78055715a103f17e3133ceff1aacf2f403011", "a"},
-		TestHash{"b285056dbf18d7392d7677369524dd14747459ed8143997e163b2986f92fd42c", "abc"},
-		TestHash{"bc6041dd2aa401ebfa6e9886734174febdb4729aa972d60f549ac39b29721ba0", "message digest"},
-		TestHash{"9004294a361a508c586fe53d1f1b02746765e71b765472786e4770d565830a76", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"73b70a39497de53a6e08c67b6d4db853540f03e9389299d9b0156ef7e85d0f61", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"6bc7b38989b28cf93ae8842bf9d752905910a7528a61e5bce0782de43e610c90", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-		TestHash{"2cefc2f7b7bdc514e18ea57fa74ff357e7fa17d652c75f69cb1be7893ede48eb", "This is message, length=32 bytes"},
-		TestHash{"c3730c5cbccacf915ac292676f21e8bd4ef75331d9405e5f1a61dc3130a65011", "Suppose the original message has length = 50 bytes"},
-	}
-	for v, _ in test_vectors {
-		computed     := gost.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_streebog_256 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb", ""},
-		TestHash{"3e7dea7f2384b6c5a3d0e24aaa29c05e89ddd762145030ec22c71a6db8b2c1f4", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"36816a824dcbe7d6171aa58500741f2ea2757ae2e1784ab72c5c3c6c198d71da", "The quick brown fox jumps over the lazy dog."},
-	}
-	for v, _ in test_vectors {
-		computed     := streebog.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_streebog_512 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a", ""},
-		TestHash{"d2b793a0bb6cb5904828b5b6dcfb443bb8f33efc06ad09368878ae4cdc8245b97e60802469bed1e7c21a64ff0b179a6a1e0bb74d92965450a0adab69162c00fe", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"fe0c42f267d921f940faa72bd9fcf84f9f1bd7e9d055e9816e4c2ace1ec83be82d2957cd59b86e123d8f5adee80b3ca08a017599a9fc1a14d940cf87c77df070", "The quick brown fox jumps over the lazy dog."},
-	}
-	for v, _ in test_vectors {
-		computed     := streebog.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_blake_224 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"7dc5313b1c04512a174bd6503b89607aecbee0903d40a8a569c94eed", ""},
-		TestHash{"304c27fdbf308aea06955e331adc6814223a21fccd24c09fde9eda7b", "ube"},
-		TestHash{"cfb6848add73e1cb47994c4765df33b8f973702705a30a71fe4747a3", "BLAKE"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_blake_256 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"716f6e863f744b9ac22c97ec7b76ea5f5908bc5b2f67c61510bfc4751384ea7a", ""},
-		TestHash{"e802fe2a73fbe5853408f051d040aeb3a76a4d7a0fc5c3415d1af090f76a2c81", "ube"},
-		TestHash{"07663e00cf96fbc136cf7b1ee099c95346ba3920893d18cc8851f22ee2e36aa6", "BLAKE"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_blake_384 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"c6cbd89c926ab525c242e6621f2f5fa73aa4afe3d9e24aed727faaadd6af38b620bdb623dd2b4788b1c8086984af8706", ""},
-		TestHash{"8f22f120b2b99dd4fd32b98c8c83bd87abd6413f7317be936b1997511247fc68ae781c6f42113224ccbc1567b0e88593", "ube"},
-		TestHash{"f28742f7243990875d07e6afcff962edabdf7e9d19ddea6eae31d094c7fa6d9b00c8213a02ddf1e2d9894f3162345d85", "BLAKE"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_blake_512 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"a8cfbbd73726062df0c6864dda65defe58ef0cc52a5625090fa17601e1eecd1b628e94f396ae402a00acc9eab77b4d4c2e852aaaa25a636d80af3fc7913ef5b8", ""},
-		TestHash{"49a24ca8f230936f938c19484d46b58f13ea4448ddadafecdf01419b1e1dd922680be2de84069187973ab61b10574da2ee50cbeaade68ea9391c8ec041b76be0", "ube"},
-		TestHash{"7bf805d0d8de36802b882e65d0515aa7682a2be97a9d9ec1399f4be2eff7de07684d7099124c8ac81c1c7c200d24ba68c6222e75062e04feb0e9dd589aa6e3b7", "BLAKE"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
 @(test)
 test_blake2b :: proc(t: ^testing.T) {
 	test_vectors := [?]TestHash {
@@ -637,188 +440,6 @@ test_blake2s :: proc(t: ^testing.T) {
 	}
 }
 
-@(test)
-test_ripemd_128 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
-	test_vectors := [?]TestHash {
-		TestHash{"cdf26213a150dc3ecb610f18f6b38b46", ""},
-		TestHash{"86be7afa339d0fc7cfc785e72f578d33", "a"},
-		TestHash{"c14a12199c66e4ba84636b0f69144c77", "abc"},
-		TestHash{"9e327b3d6e523062afc1132d7df9d1b8", "message digest"},
-		TestHash{"fd2aa607f71dc8f510714922b371834e", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"a1aa0689d0fafa2ddc22e88b49133a06", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"d1e959eb179c911faea4624c60c5c702", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors {
-		computed     := ripemd.hash_128(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_ripemd_160 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
-	test_vectors := [?]TestHash {
-		TestHash{"9c1185a5c5e9fc54612808977ee8f548b2258d31", ""},
-		TestHash{"0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", "a"},
-		TestHash{"8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", "abc"},
-		TestHash{"5d0689ef49d2fae572b881b123a85ffa21595f36", "message digest"},
-		TestHash{"f71c27109c692c1b56bbdceb5b9d2865b3708dbc", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"12a053384a9c0c88e405a06c27dcf49ada62eb2b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"b0e20b6e3116640286ed3a87a5713079b21f5189", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors {
-		computed     := ripemd.hash_160(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_ripemd_256 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
-	test_vectors := [?]TestHash {
-		TestHash{"02ba4c4e5f8ecd1877fc52d64d30e37a2d9774fb1e5d026380ae0168e3c5522d", ""},
-		TestHash{"f9333e45d857f5d90a91bab70a1eba0cfb1be4b0783c9acfcd883a9134692925", "a"},
-		TestHash{"afbd6e228b9d8cbbcef5ca2d03e6dba10ac0bc7dcbe4680e1e42d2e975459b65", "abc"},
-		TestHash{"87e971759a1ce47a514d5c914c392c9018c7c46bc14465554afcdf54a5070c0e", "message digest"},
-		TestHash{"649d3034751ea216776bf9a18acc81bc7896118a5197968782dd1fd97d8d5133", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"3843045583aac6c8c8d9128573e7a9809afb2a0f34ccc36ea9e72f16f6368e3f", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"5740a408ac16b720b84424ae931cbb1fe363d1d0bf4017f1a89f7ea6de77a0b8", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors {
-		computed     := ripemd.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_ripemd_320 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
-	test_vectors := [?]TestHash {
-		TestHash{"22d65d5661536cdc75c1fdf5c6de7b41b9f27325ebc61e8557177d705a0ec880151c3a32a00899b8", ""},
-		TestHash{"ce78850638f92658a5a585097579926dda667a5716562cfcf6fbe77f63542f99b04705d6970dff5d", "a"},
-		TestHash{"de4c01b3054f8930a79d09ae738e92301e5a17085beffdc1b8d116713e74f82fa942d64cdbc4682d", "abc"},
-		TestHash{"3a8e28502ed45d422f68844f9dd316e7b98533fa3f2a91d29f84d425c88d6b4eff727df66a7c0197", "message digest"},
-		TestHash{"cabdb1810b92470a2093aa6bce05952c28348cf43ff60841975166bb40ed234004b8824463e6b009", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"d034a7950cf722021ba4b84df769a5de2060e259df4c9bb4a4268c0e935bbc7470a969c9d072a1ac", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"ed544940c86d67f250d232c30b7b3e5770e0c60c8cb9a4cafe3b11388af9920e1b99230b843c86a4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors {
-		computed     := ripemd.hash_320(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger_128 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"3293ac630c13f0245f92bbb1766e1616", ""},
-		TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc", "a"},
-		TestHash{"2aab1484e8c158f2bfb8c5ff41b57a52", "abc"},
-		TestHash{"d981f8cb78201a950dcf3048751e441c", "message digest"},
-		TestHash{"1714a472eee57d30040412bfcc55032a", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"8dcea680a17583ee502ba38a3c368651", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"1c14795529fd9f207a958f84c52f11e8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-		TestHash{"6d12a41e72e644f017b6f0e2f7b44c62", "The quick brown fox jumps over the lazy dog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger.hash_128(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger_160 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"3293ac630c13f0245f92bbb1766e16167a4e5849", ""},
-		TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f", "a"},
-		TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c", "abc"},
-		TestHash{"d981f8cb78201a950dcf3048751e441c517fca1a", "message digest"},
-		TestHash{"1714a472eee57d30040412bfcc55032a0b11602f", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"8dcea680a17583ee502ba38a3c368651890ffbcc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cab", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-		TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5", "The quick brown fox jumps over the lazy dog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger.hash_160(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger_192 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"3293ac630c13f0245f92bbb1766e16167a4e58492dde73f3", ""},
-		TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f5f247809", "a"},
-		TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c957b5f93", "abc"},
-		TestHash{"d981f8cb78201a950dcf3048751e441c517fca1aa55a29f6", "message digest"},
-		TestHash{"1714a472eee57d30040412bfcc55032a0b11602ff37beee9", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e7b53f78e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"8dcea680a17583ee502ba38a3c368651890ffbccdc49a8cc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cabdfd91bfd", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-		TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5d2c5b075", "The quick brown fox jumps over the lazy dog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger.hash_192(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger2_128 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"4441be75f6018773c206c22745374b92", ""},
-		TestHash{"976abff8062a2e9dcea3a1ace966ed9c", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"09c11330283a27efb51930aa7dc1ec62", "The quick brown fox jumps over the lazy cog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger2.hash_128(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger2_160 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"4441be75f6018773c206c22745374b924aa8313f", ""},
-		TestHash{"976abff8062a2e9dcea3a1ace966ed9c19cb8555", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"09c11330283a27efb51930aa7dc1ec624ff738a8", "The quick brown fox jumps over the lazy cog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger2.hash_160(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_tiger2_192 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"4441be75f6018773c206c22745374b924aa8313fef919f41", ""},
-		TestHash{"976abff8062a2e9dcea3a1ace966ed9c19cb85558b4976d8", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"09c11330283a27efb51930aa7dc1ec624ff738a8d9bdd3df", "The quick brown fox jumps over the lazy cog"},
-	}
-	for v, _ in test_vectors {
-		computed     := tiger2.hash_192(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
 @(test)
 test_sm3 :: proc(t: ^testing.T) {
 	test_vectors := [?]TestHash {
@@ -835,283 +456,6 @@ test_sm3 :: proc(t: ^testing.T) {
 	}
 }
 
-@(test)
-test_jh_224 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"2c99df889b019309051c60fecc2bd285a774940e43175b76b2626630", ""},
-		TestHash{"e715f969fb61b203a97e494aab92d91a9cec52f0933436b0d63bf722", "a"},
-		TestHash{"c2b1967e635bd55b6a4d36f863ac4a877be302251d68692873007281", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := jh.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_jh_256 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"46e64619c18bb0a92a5e87185a47eef83ca747b8fcc8e1412921357e326df434", ""},
-		TestHash{"d52c0c130a1bc0ae5136375637a52773e150c71efe1c968df8956f6745b05386", "a"},
-		TestHash{"fc4214867025a8af94c614353b3553b10e561ae749fc18c40e5fd44a7a4ecd1b", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := jh.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_jh_384 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"2fe5f71b1b3290d3c017fb3c1a4d02a5cbeb03a0476481e25082434a881994b0ff99e078d2c16b105ad069b569315328", ""},
-		TestHash{"77de897ca4fd5dadfbcbd1d8d4ea3c3c1426855e38661325853e92b069f3fe156729f6bbb9a5892c7c18a77f1cb9d0bb", "a"},
-		TestHash{"6f73d9b9b8ed362f8180fb26020725b40bd6ca75b3b947405f26c4c37a885ce028876dc42e379d2faf6146fed3ea0e42", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := jh.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_jh_512 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"90ecf2f76f9d2c8017d979ad5ab96b87d58fc8fc4b83060f3f900774faa2c8fabe69c5f4ff1ec2b61d6b316941cedee117fb04b1f4c5bc1b919ae841c50eec4f", ""},
-		TestHash{"f12c87e986daff17c481c81a99a39b603ca6bafcd320c5735523b97cb9a26f7681bad62ffad9aad0e21160a05f773fb0d1434ca4cbcb0483f480a171ada1561b", "a"},
-		TestHash{"bafb8e710b35eabeb1a48220c4b0987c2c985b6e73b7b31d164bfb9d67c94d99d7bc43b474a25e647cd6cc36334b6a00a5f2a85fae74907fd2885c6168132fe7", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := jh.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_groestl_224 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"f2e180fb5947be964cd584e22e496242c6a329c577fc4ce8c36d34c3", ""},
-		TestHash{"2dfa5bd326c23c451b1202d99e6cee98a98c45927e1a31077f538712", "a"},
-		TestHash{"c8a3e7274d599900ae673419683c3626a2e49ed57308ed2687508bef", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := groestl.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_groestl_256 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"1a52d11d550039be16107f9c58db9ebcc417f16f736adb2502567119f0083467", ""},
-		TestHash{"3645c245bb31223ad93c80885b719aa40b4bed0a9d9d6e7c11fe99e59ca350b5", "a"},
-		TestHash{"2679d98913bee62e57fdbdde97ddb328373548c6b24fc587cc3d08f2a02a529c", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := groestl.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_groestl_384 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"ac353c1095ace21439251007862d6c62f829ddbe6de4f78e68d310a9205a736d8b11d99bffe448f57a1cfa2934f044a5", ""},
-		TestHash{"13fce7bd9fc69b67cc12c77e765a0a97794c585f89df39fbff32408e060d7d9225c7e80fd87da647686888bda896c342", "a"},
-		TestHash{"1c446cd70a6de52c9db386f5305aae029fe5a4120bc6230b7cd3a5e1ef1949cc8e6d2548c24cd7347b5ba512628a62f6", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := groestl.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_groestl_512 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"6d3ad29d279110eef3adbd66de2a0345a77baede1557f5d099fce0c03d6dc2ba8e6d4a6633dfbd66053c20faa87d1a11f39a7fbe4a6c2f009801370308fc4ad8", ""},
-		TestHash{"9ef345a835ee35d6d0d462ce45f722d84b5ca41fde9c81a98a22cfb4f7425720511b03a258cdc055bf8e9179dc9bdb5d88bed906c71125d4cf0cd39d3d7bebc7", "a"},
-		TestHash{"862849fd911852cd54beefa88759db4cead0ef8e36aaf15398303c5c4cbc016d9b4c42b32081cbdcba710d2693e7663d244fae116ec29ffb40168baf44f944e7", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := groestl.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_haval_128 :: proc(t: ^testing.T) {
-	test_vectors_3 := [?]TestHash {
-		TestHash{"c68f39913f901f3ddf44c707357a7d70", ""},
-		TestHash{"0cd40739683e15f01ca5dbceef4059f1", "a"},
-		TestHash{"9e40ed883fb63e985d299b40cda2b8f2", "abc"},
-		TestHash{"3caf4a79e81adcd6d1716bcc1cef4573", "message digest"},
-		TestHash{"dc502247fb3eb8376109eda32d361d82", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"44068770868768964d1f2c3bff4aa3d8", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"de5eb3f7d9eb08fae7a07d68e3047ec6", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors_3 {
-		computed     := haval.hash_128_3(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_4 := [?]TestHash {
-		TestHash{"ee6bbf4d6a46a679b3a856c88538bb98", ""},
-		TestHash{"5cd07f03330c3b5020b29ba75911e17d", "a"},
-	}
-	for v, _ in test_vectors_4 {
-		computed     := haval.hash_128_4(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_5 := [?]TestHash {
-		TestHash{"184b8482a0c050dca54b59c7f05bf5dd", ""},
-		TestHash{"f23fbe704be8494bfa7a7fb4f8ab09e5", "a"},
-	}
-	for v, _ in test_vectors_5 {
-		computed     := haval.hash_128_5(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_haval_160 :: proc(t: ^testing.T) {
-	test_vectors_3 := [?]TestHash {
-		TestHash{"d353c3ae22a25401d257643836d7231a9a95f953", ""},
-		TestHash{"4da08f514a7275dbc4cece4a347385983983a830", "a"},
-		TestHash{"b21e876c4d391e2a897661149d83576b5530a089", "abc"},
-		TestHash{"43a47f6f1c016207f08be8115c0977bf155346da", "message digest"},
-		TestHash{"eba9fa6050f24c07c29d1834a60900ea4e32e61b", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"c30bce448cf8cfe957c141e90c0a063497cdfeeb", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"97dc988d97caae757be7523c4e8d4ea63007a4b9", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-	}
-	for v, _ in test_vectors_3 {
-		computed     := haval.hash_160_3(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_4 := [?]TestHash {
-		TestHash{"1d33aae1be4146dbaaca0b6e70d7a11f10801525", ""},
-		TestHash{"e0a5be29627332034d4dd8a910a1a0e6fe04084d", "a"},
-	}
-	for v, _ in test_vectors_4 {
-		computed     := haval.hash_160_4(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_5 := [?]TestHash {
-		TestHash{"255158cfc1eed1a7be7c55ddd64d9790415b933b", ""},
-		TestHash{"f5147df7abc5e3c81b031268927c2b5761b5a2b5", "a"},
-	}
-	for v, _ in test_vectors_5 {
-		computed     := haval.hash_160_5(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_haval_192 :: proc(t: ^testing.T) {
-	test_vectors_3 := [?]TestHash {
-		TestHash{"e9c48d7903eaf2a91c5b350151efcb175c0fc82de2289a4e", ""},
-		TestHash{"b359c8835647f5697472431c142731ff6e2cddcacc4f6e08", "a"},
-	}
-	for v, _ in test_vectors_3 {
-		computed     := haval.hash_192_3(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_4 := [?]TestHash {
-		TestHash{"4a8372945afa55c7dead800311272523ca19d42ea47b72da", ""},
-		TestHash{"856c19f86214ea9a8a2f0c4b758b973cce72a2d8ff55505c", "a"},
-	}
-	for v, _ in test_vectors_4 {
-		computed     := haval.hash_192_4(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_5 := [?]TestHash {
-		TestHash{"4839d0626f95935e17ee2fc4509387bbe2cc46cb382ffe85", ""},
-		TestHash{"5ffa3b3548a6e2cfc06b7908ceb5263595df67cf9c4b9341", "a"},
-	}
-	for v, _ in test_vectors_5 {
-		computed     := haval.hash_192_5(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_haval_224 :: proc(t: ^testing.T) {
-	test_vectors_3 := [?]TestHash {
-		TestHash{"c5aae9d47bffcaaf84a8c6e7ccacd60a0dd1932be7b1a192b9214b6d", ""},
-		TestHash{"731814ba5605c59b673e4caae4ad28eeb515b3abc2b198336794e17b", "a"},
-	}
-	for v, _ in test_vectors_3 {
-		computed     := haval.hash_224_3(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_4 := [?]TestHash {
-		TestHash{"3e56243275b3b81561750550e36fcd676ad2f5dd9e15f2e89e6ed78e", ""},
-		TestHash{"742f1dbeeaf17f74960558b44f08aa98bdc7d967e6c0ab8f799b3ac1", "a"},
-	}
-	for v, _ in test_vectors_4 {
-		computed     := haval.hash_224_4(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_5 := [?]TestHash {
-		TestHash{"4a0513c032754f5582a758d35917ac9adf3854219b39e3ac77d1837e", ""},
-		TestHash{"67b3cb8d4068e3641fa4f156e03b52978b421947328bfb9168c7655d", "a"},
-	}
-	for v, _ in test_vectors_5 {
-		computed     := haval.hash_224_5(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_haval_256 :: proc(t: ^testing.T) {
-	test_vectors_3 := [?]TestHash {
-		TestHash{"4f6938531f0bc8991f62da7bbd6f7de3fad44562b8c6f4ebf146d5b4e46f7c17", ""},
-		TestHash{"47c838fbb4081d9525a0ff9b1e2c05a98f625714e72db289010374e27db021d8", "a"},
-	}
-	for v, _ in test_vectors_3 {
-		computed     := haval.hash_256_3(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_4 := [?]TestHash {
-		TestHash{"c92b2e23091e80e375dadce26982482d197b1a2521be82da819f8ca2c579b99b", ""},
-		TestHash{"e686d2394a49b44d306ece295cf9021553221db132b36cc0ff5b593d39295899", "a"},
-	}
-	for v, _ in test_vectors_4 {
-		computed     := haval.hash_256_4(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-	test_vectors_5 := [?]TestHash {
-		TestHash{"be417bb4dd5cfb76c7126f4f8eeb1553a449039307b1a3cd451dbfdc0fbbe330", ""},
-		TestHash{"de8fd5ee72a5e4265af0a756f4e1a1f65c9b2b2f47cf17ecf0d1b88679a3e22f", "a"},
-	}
-	for v, _ in test_vectors_5 {
-		computed     := haval.hash_256_5(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
 @(test)
 test_siphash_2_4 :: proc(t: ^testing.T) {
 	// Test vectors from
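
Every test deleted above follows the same table-driven shape, which the remaining tests keep. A minimal sketch of that shape; the `TestHash` struct, `hex_string` helper, and `expect` wrapper are defined earlier in this test file, and `some_pkg` is a placeholder for any hash package that exposes a `hash` proc group:

```
@(test)
test_example :: proc(t: ^testing.T) {
	test_vectors := [?]TestHash {
		// Expected hex digest first, then the input string.
		TestHash{"<expected digest>", "<input>"},
	}
	for v, _ in test_vectors {
		computed     := some_pkg.hash(v.str)
		computed_str := hex_string(computed[:])
		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
	}
}
```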

+ 3 - 222
tests/vendor/botan/test_vendor_botan.odin

@@ -17,21 +17,14 @@ import "core:fmt"
 import "core:os"
 import "core:strings"
 
-import "vendor:botan/md4"
-import "vendor:botan/md5"
-import "vendor:botan/sha1"
+import "vendor:botan/legacy/md5"
+import "vendor:botan/legacy/sha1"
 import "vendor:botan/sha2"
 import "vendor:botan/sha3"
-import "vendor:botan/keccak"
+import "vendor:botan/legacy/keccak"
 import "vendor:botan/shake"
-import "vendor:botan/whirlpool"
-import "vendor:botan/ripemd"
 import "vendor:botan/blake2b"
-import "vendor:botan/tiger"
-import "vendor:botan/gost"
-import "vendor:botan/streebog"
 import "vendor:botan/sm3"
-import "vendor:botan/skein512"
 import "vendor:botan/siphash"
 
 TEST_count := 0
@@ -59,7 +52,6 @@ when ODIN_TEST {
 
 main :: proc() {
     t := testing.T{}
-    test_md4(&t)
     test_md5(&t)
     test_sha1(&t)
     test_sha224(&t)
@@ -73,18 +65,8 @@ main :: proc() {
     // test_shake_128(&t)
     // test_shake_256(&t)
     test_keccak_512(&t)
-    test_whirlpool(&t)
-    test_gost(&t)
-    test_streebog_256(&t)
-    test_streebog_512(&t)
     test_blake2b(&t)
-    test_ripemd_160(&t)
-    // test_tiger_128(&t)
-    // test_tiger_160(&t)
-    // test_tiger_192(&t)
     test_sm3(&t)
-    test_skein512_256(&t)
-    test_skein512_512(&t)
     test_siphash_2_4(&t)
 
     fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
@@ -108,25 +90,6 @@ hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string
     return string(buf)
 }
 
-@(test)
-test_md4 :: proc(t: ^testing.T) {
-    // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1320
-    test_vectors := [?]TestHash {
-        TestHash{"31d6cfe0d16ae931b73c59d7e0c089c0", ""},
-        TestHash{"bde52cb31de33e46245e05fbdbd6fb24", "a"},
-        TestHash{"a448017aaf21d8525fc10ae87aa6729d", "abc"},
-        TestHash{"d9130a8164549fe818874806e1c7014b", "message digest"},
-        TestHash{"d79e1c308aa5bbcdeea8ed63df412da9", "abcdefghijklmnopqrstuvwxyz"},
-        TestHash{"043f8582f241db351ce627e153e7f0e4", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-        TestHash{"e33b4ddc9c38f2199c3e7b164fcc0536", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-    }
-    for v, _ in test_vectors {
-        computed     := md4.hash(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
 @(test)
 test_md5 :: proc(t: ^testing.T) {
     // Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
@@ -375,80 +338,6 @@ test_keccak_512 :: proc(t: ^testing.T) {
     }
 }
 
-@(test)
-test_whirlpool :: proc(t: ^testing.T) {
-    // Test vectors from 
-    // https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html
-    test_vectors := [?]TestHash {
-        TestHash{"19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3", ""},
-        TestHash{"8aca2602792aec6f11a67206531fb7d7f0dff59413145e6973c45001d0087b42d11bc645413aeff63a42391a39145a591a92200d560195e53b478584fdae231a", "a"},
-        TestHash{"33e24e6cbebf168016942df8a7174048f9cebc45cbd829c3b94b401a498acb11c5abcca7f2a1238aaf534371e87a4e4b19758965d5a35a7cad87cf5517043d97", "ab"},
-        TestHash{"4e2448a4c6f486bb16b6562c73b4020bf3043e3a731bce721ae1b303d97e6d4c7181eebdb6c57e277d0e34957114cbd6c797fc9d95d8b582d225292076d4eef5", "abc"},
-        TestHash{"bda164f0b930c43a1bacb5df880b205d15ac847add35145bf25d991ae74f0b72b1ac794f8aacda5fcb3c47038c954742b1857b5856519de4d1e54bfa2fa4eac5", "abcd"},
-        TestHash{"5d745e26ccb20fe655d39c9e7f69455758fbae541cb892b3581e4869244ab35b4fd6078f5d28b1f1a217452a67d9801033d92724a221255a5e377fe9e9e5f0b2", "abcde"},
-        TestHash{"a73e425459567308ba5f9eb2ae23570d0d0575eb1357ecf6ac88d4e0358b0ac3ea2371261f5d4c070211784b525911b9eec0ad968429bb7c7891d341cff4e811", "abcdef"},
-        TestHash{"08b388f68fd3eb51906ac3d3c699b8e9c3ac65d7ceb49d2e34f8a482cbc3082bc401cead90e85a97b8647c948bf35e448740b79659f3bee42145f0bd653d1f25", "abcdefg"},
-        TestHash{"1f1a84d30612820243afe2022712f9dac6d07c4c8bb41b40eacab0184c8d82275da5bcadbb35c7ca1960ff21c90acbae8c14e48d9309e4819027900e882c7ad9", "abcdefgh"},
-        TestHash{"11882bc9a31ac1cf1c41dcd9fd6fdd3ccdb9b017fc7f4582680134f314d7bb49af4c71f5a920bc0a6a3c1ff9a00021bf361d9867fe636b0bc1da1552e4237de4", "abcdefghi"},
-        TestHash{"717163de24809ffcf7ff6d5aba72b8d67c2129721953c252a4ddfb107614be857cbd76a9d5927de14633d6bdc9ddf335160b919db5c6f12cb2e6549181912eef", "abcdefghij"},
-        TestHash{"b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c", "The quick brown fox jumps over the lazy eog"},
-    }
-    for v, _ in test_vectors {
-        computed     := whirlpool.hash(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_gost :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"981e5f3ca30c841487830f84fb433e13ac1101569b9c13584ac483234cd656c0", ""},
-        TestHash{"e74c52dd282183bf37af0079c9f78055715a103f17e3133ceff1aacf2f403011", "a"},
-        TestHash{"b285056dbf18d7392d7677369524dd14747459ed8143997e163b2986f92fd42c", "abc"},
-        TestHash{"bc6041dd2aa401ebfa6e9886734174febdb4729aa972d60f549ac39b29721ba0", "message digest"},
-        TestHash{"9004294a361a508c586fe53d1f1b02746765e71b765472786e4770d565830a76", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"73b70a39497de53a6e08c67b6d4db853540f03e9389299d9b0156ef7e85d0f61", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-        TestHash{"6bc7b38989b28cf93ae8842bf9d752905910a7528a61e5bce0782de43e610c90", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-        TestHash{"2cefc2f7b7bdc514e18ea57fa74ff357e7fa17d652c75f69cb1be7893ede48eb", "This is message, length=32 bytes"},
-        TestHash{"c3730c5cbccacf915ac292676f21e8bd4ef75331d9405e5f1a61dc3130a65011", "Suppose the original message has length = 50 bytes"},
-    }
-    for v, _ in test_vectors {
-        computed     := gost.hash(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_streebog_256 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb", ""},
-        TestHash{"3e7dea7f2384b6c5a3d0e24aaa29c05e89ddd762145030ec22c71a6db8b2c1f4", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"36816a824dcbe7d6171aa58500741f2ea2757ae2e1784ab72c5c3c6c198d71da", "The quick brown fox jumps over the lazy dog."},
-    }
-    for v, _ in test_vectors {
-        computed     := streebog.hash_256(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_streebog_512 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a", ""},
-        TestHash{"d2b793a0bb6cb5904828b5b6dcfb443bb8f33efc06ad09368878ae4cdc8245b97e60802469bed1e7c21a64ff0b179a6a1e0bb74d92965450a0adab69162c00fe", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"fe0c42f267d921f940faa72bd9fcf84f9f1bd7e9d055e9816e4c2ace1ec83be82d2957cd59b86e123d8f5adee80b3ca08a017599a9fc1a14d940cf87c77df070", "The quick brown fox jumps over the lazy dog."},
-    }
-    for v, _ in test_vectors {
-        computed     := streebog.hash_512(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
 @(test)
 test_blake2b :: proc(t: ^testing.T) {
     test_vectors := [?]TestHash {
@@ -462,86 +351,6 @@ test_blake2b :: proc(t: ^testing.T) {
     }
 }
 
-@(test)
-test_ripemd_160 :: proc(t: ^testing.T) {
-    // Test vectors from 
-    // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
-    test_vectors := [?]TestHash {
-        TestHash{"9c1185a5c5e9fc54612808977ee8f548b2258d31", ""},
-        TestHash{"0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", "a"},
-        TestHash{"8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", "abc"},
-        TestHash{"5d0689ef49d2fae572b881b123a85ffa21595f36", "message digest"},
-        TestHash{"f71c27109c692c1b56bbdceb5b9d2865b3708dbc", "abcdefghijklmnopqrstuvwxyz"},
-        TestHash{"12a053384a9c0c88e405a06c27dcf49ada62eb2b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-        TestHash{"b0e20b6e3116640286ed3a87a5713079b21f5189", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-    }
-    for v, _ in test_vectors {
-        computed     := ripemd.hash_160(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_tiger_128 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"3293ac630c13f0245f92bbb1766e1616", ""},
-        TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc", "a"},
-        TestHash{"2aab1484e8c158f2bfb8c5ff41b57a52", "abc"},
-        TestHash{"d981f8cb78201a950dcf3048751e441c", "message digest"},
-        TestHash{"1714a472eee57d30040412bfcc55032a", "abcdefghijklmnopqrstuvwxyz"},
-        TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-        TestHash{"8dcea680a17583ee502ba38a3c368651", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-        TestHash{"1c14795529fd9f207a958f84c52f11e8", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-        TestHash{"6d12a41e72e644f017b6f0e2f7b44c62", "The quick brown fox jumps over the lazy dog"},
-    }
-    for v, _ in test_vectors {
-        computed     := tiger.hash_128(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_tiger_160 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"3293ac630c13f0245f92bbb1766e16167a4e5849", ""},
-        TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f", "a"},
-        TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c", "abc"},
-        TestHash{"d981f8cb78201a950dcf3048751e441c517fca1a", "message digest"},
-        TestHash{"1714a472eee57d30040412bfcc55032a0b11602f", "abcdefghijklmnopqrstuvwxyz"},
-        TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-        TestHash{"8dcea680a17583ee502ba38a3c368651890ffbcc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-        TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cab", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-        TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5", "The quick brown fox jumps over the lazy dog"},
-    }
-    for v, _ in test_vectors {
-        computed     := tiger.hash_160(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_tiger_192 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"3293ac630c13f0245f92bbb1766e16167a4e58492dde73f3", ""},
-        TestHash{"77befbef2e7ef8ab2ec8f93bf587a7fc613e247f5f247809", "a"},
-        TestHash{"2aab1484e8c158f2bfb8c5ff41b57a525129131c957b5f93", "abc"},
-        TestHash{"d981f8cb78201a950dcf3048751e441c517fca1aa55a29f6", "message digest"},
-        TestHash{"1714a472eee57d30040412bfcc55032a0b11602ff37beee9", "abcdefghijklmnopqrstuvwxyz"},
-        TestHash{"0f7bf9a19b9c58f2b7610df7e84f0ac3a71c631e7b53f78e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-        TestHash{"8dcea680a17583ee502ba38a3c368651890ffbccdc49a8cc", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-        TestHash{"1c14795529fd9f207a958f84c52f11e887fa0cabdfd91bfd", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-        TestHash{"6d12a41e72e644f017b6f0e2f7b44c6285f06dd5d2c5b075", "The quick brown fox jumps over the lazy dog"},
-    }
-    for v, _ in test_vectors {
-        computed     := tiger.hash_192(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
 @(test)
 test_sm3 :: proc(t: ^testing.T) {
     test_vectors := [?]TestHash {
@@ -558,34 +367,6 @@ test_sm3 :: proc(t: ^testing.T) {
     }
 }
 
-@(test)
-test_skein512_256 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"39ccc4554a8b31853b9de7a1fe638a24cce6b35a55f2431009e18780335d2621", ""},
-        TestHash{"b3250457e05d3060b1a4bbc1428bc75a3f525ca389aeab96cfa34638d96e492a", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"41e829d7fca71c7d7154ed8fc8a069f274dd664ae0ed29d365d919f4e575eebb", "The quick brown fox jumps over the lazy dog."},
-    }
-    for v, _ in test_vectors {
-        computed     := skein512.hash_256(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
-@(test)
-test_skein512_512 :: proc(t: ^testing.T) {
-    test_vectors := [?]TestHash {
-        TestHash{"bc5b4c50925519c290cc634277ae3d6257212395cba733bbad37a4af0fa06af41fca7903d06564fea7a2d3730dbdb80c1f85562dfcc070334ea4d1d9e72cba7a", ""},
-        TestHash{"94c2ae036dba8783d0b3f7d6cc111ff810702f5c77707999be7e1c9486ff238a7044de734293147359b4ac7e1d09cd247c351d69826b78dcddd951f0ef912713", "The quick brown fox jumps over the lazy dog"},
-        TestHash{"658223cb3d69b5e76e3588ca63feffba0dc2ead38a95d0650564f2a39da8e83fbb42c9d6ad9e03fbfde8a25a880357d457dbd6f74cbcb5e728979577dbce5436", "The quick brown fox jumps over the lazy dog."},
-    }
-    for v, _ in test_vectors {
-        computed     := skein512.hash_512(v.str)
-        computed_str := hex_string(computed[:])
-        expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-    }
-}
-
 @(test)
 test_siphash_2_4 :: proc(t: ^testing.T) {
     // Test vectors from

+ 27 - 28
vendor/botan/README.md

@@ -1,40 +1,38 @@
 # botan
-A wrapper for the Botan crypto library
+
+A wrapper for the Botan cryptography library
 
 ## Supported
 This library offers full bindings for everything exposed by Botan's FFI.
-Wrappers for hashing algorithms have been added to match the API within the Odin `core:crypto` library.
+Wrappers for hashing algorithms have been added to match the API within
+the Odin `core:crypto` library.
 
 ## Hashing algorithms
+
 | Algorithm                                                                                                    |                  |
 |:-------------------------------------------------------------------------------------------------------------|:-----------------|
 | [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                                                     | &#10004;&#65039; |
-| [GOST](https://datatracker.ietf.org/doc/html/rfc5831)                                                        | &#10004;&#65039; |
-| [Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                           | &#10004;&#65039; |
-| [MD4](https://datatracker.ietf.org/doc/html/rfc1320)                                                         | &#10004;&#65039; |
-| [MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                         | &#10004;&#65039; |
-| [RIPEMD-160](https://homes.esat.kuleuven.be/~bosselae/ripemd160.html)                                        | &#10004;&#65039; |
-| [SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                       | &#10004;&#65039; |
 | [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | &#10004;&#65039; |
 | [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | &#10004;&#65039; |
 | [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | &#10004;&#65039; |
-| [Skein-512](https://www.schneier.com/academic/skein/)                                                        | &#10004;&#65039; |
 | [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)                                           | &#10004;&#65039; |
-| [Streebog](https://datatracker.ietf.org/doc/html/rfc6986)                                                    | &#10004;&#65039; |
-| [Tiger](https://www.cs.technion.ac.il/~biham/Reports/Tiger/)                                                 | &#10004;&#65039; |
-| [Whirlpool](https://web.archive.org/web/20171129084214/http://www.larc.usp.br/~pbarreto/WhirlpoolPage.html)  | &#10004;&#65039; |
+| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                    | &#10004;&#65039; |
+| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                  | &#10004;&#65039; |
+| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                | &#10004;&#65039; |
 
 #### High level API
+
-Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`.
+Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`.
 Included in these groups are six procedures.
-* `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
-* `hash_bytes` - Hash a given byte slice and return the computed hash
-* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
-* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
-* `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
-* `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)
+- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
+- `hash_bytes` - Hash a given byte slice and return the computed hash
+- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
+- `hash_bytes_to_buffer` - Hash a given byte slice and write the computed hash into the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
+- `hash_stream` - Takes an `io.Stream` and returns the hash computed from its contents
+- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls whether the file is streamed (this is the default) or read at once (set to `true`)
 
 #### Low level API
+
 The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
 You may also directly call them, if you wish.
 
@@ -43,28 +41,29 @@ You may also directly call them, if you wish.
 package crypto_example
 
 // Import the desired package
-import "vendor:botan/md4"
+import "vendor:botan/blake2b"
 
 main :: proc() {
     input := "foo"
 
     // Compute the hash, using the high level API
-    computed_hash := md4.hash(input)
+    computed_hash := blake2b.hash(input)
 
     // Variant that takes a destination buffer, instead of returning the computed hash
-    hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
-    md4.hash(input, hash[:])
+    hash := make([]byte, blake2b.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
+    blake2b.hash(input, hash[:])
 
     // Compute the hash, using the low level API
     // @note: Botan's structs are opaque by design, they don't expose any fields
-    ctx: md4.Md4_Context
-    computed_hash_low: [16]byte
-    md4.init(&ctx)
-    md4.update(&ctx, transmute([]byte)input)
-    md4.final(&ctx, computed_hash_low[:])
+    ctx: blake2b.Context
+    computed_hash_low: [blake2b.DIGEST_SIZE]byte
+    blake2b.init(&ctx)
+    blake2b.update(&ctx, transmute([]byte)input)
+    blake2b.final(&ctx, computed_hash_low[:])
 }
 ```
 For example uses of all available algorithms, please see the tests within `tests/vendor/botan`.
 
 ### License
+
 This library is made available under the BSD-3 license.
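
The `hash_file` procedure described above takes an optional second parameter. A minimal sketch of using it, assuming the `blake2b` wrapper from the example and a hypothetical input file:

```
package hash_file_example

import "core:fmt"
import "core:os"

import "vendor:botan/blake2b"

main :: proc() {
    hd, err := os.open("input.txt") // hypothetical input file
    if err != os.ERROR_NONE {
        return
    }
    defer os.close(hd)

    // Streams the file in 512-byte chunks by default; pass `true` as the
    // second argument to read the entire file into memory first instead.
    if digest, ok := blake2b.hash_file(hd); ok {
        fmt.println(digest[:])
    }
}
```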

+ 0 - 14
vendor/botan/bindings/botan.odin

@@ -74,23 +74,9 @@ HASH_SHA3_512       :: "SHA-3(512)"
 HASH_SHAKE_128      :: "SHAKE-128"
 HASH_SHAKE_256      :: "SHAKE-256"
 HASH_KECCAK_512     :: "Keccak-1600"
-HASH_RIPEMD_160     :: "RIPEMD-160"
-HASH_WHIRLPOOL      :: "Whirlpool"
 HASH_BLAKE2B        :: "BLAKE2b"
-HASH_MD4            :: "MD4"
 HASH_MD5            :: "MD5"
-HASH_TIGER_128      :: "Tiger(16,3)"
-HASH_TIGER_160      :: "Tiger(20,3)"
-HASH_TIGER_192      :: "Tiger(24,3)"
-HASH_GOST           :: "GOST-34.11"
-HASH_STREEBOG_256   :: "Streebog-256"
-HASH_STREEBOG_512   :: "Streebog-512"
 HASH_SM3            :: "SM3"
-HASH_SKEIN_512_256  :: "Skein-512(256)"
-HASH_SKEIN_512_512  :: "Skein-512(512)"
-
-// Not real values from Botan, only used for context setup within the crypto lib
-HASH_SKEIN_512   :: "SKEIN_512"
 
 MAC_HMAC_SHA1    :: "HMAC(SHA1)"
 MAC_HMAC_SHA_224 :: "HMAC(SHA-224)"
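
The `HASH_*` constants above are the algorithm name strings that Botan's FFI expects. A minimal sketch of driving the raw bindings directly with one of them, mirroring what the package wrappers in this commit do internally (the package name is hypothetical):

```
package raw_bindings_example

import botan "vendor:botan/bindings"

main :: proc() {
    input := "foo"
    data := transmute([]byte)input
    digest: [32]byte // SM3 produces a 256-bit digest

    ctx: botan.hash_t
    botan.hash_init(&ctx, botan.HASH_SM3, 0)
    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
    botan.hash_final(ctx, &digest[0])
    botan.hash_destroy(ctx)
}
```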

+ 18 - 21
vendor/botan/blake2b/blake2b.odin

@@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0)
+    ctx: Context
+    init(&ctx)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash :: proc {
     Low level API
 */
 
-Blake2b_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
     botan.hash_init(ctx, botan.HASH_BLAKE2B, 0)
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }
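
Since `Blake2b_Context` is now just `Context`, call sites qualify the type by package. A minimal sketch of the refactored low-level flow shown above; note that `final` also destroys the underlying Botan object, so a context is single-use:

```
package low_level_example

import "vendor:botan/blake2b"

main :: proc() {
    input := "foo"

    ctx: blake2b.Context // previously blake2b.Blake2b_Context
    digest: [blake2b.DIGEST_SIZE]byte

    blake2b.init(&ctx)
    blake2b.update(&ctx, transmute([]byte)input)
    blake2b.final(&ctx, digest[:]) // calls botan.hash_destroy internally
}
```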

+ 0 - 121
vendor/botan/gost/gost.odin

@@ -1,121 +0,0 @@
-package vendor_gost
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog:  Initial implementation.
-
-    Interface for the GOST hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 32
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_GOST, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_GOST, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_GOST, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-Gost_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t) {
-    botan.hash_init(ctx, botan.HASH_GOST, 0)
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}

+ 10 - 0
vendor/botan/legacy/README.md

@@ -0,0 +1,10 @@
+# botan/legacy
+
+These are algorithms that are shipped solely for the purpose of
+interoperability with legacy systems.  The use of these packages in
+any other capacity is discouraged, especially those that are known
+to be broken.
+
+- keccak - The draft version of the algorithm that became SHA-3
+- MD5 - Broken (https://eprint.iacr.org/2005/075)
+- SHA-1 - Broken (https://eprint.iacr.org/2017/190)
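
Because these packages now live under `legacy`, existing imports need the extra path component. A minimal sketch of using one of the relocated wrappers; the `hash` proc group is unchanged, and MD5 here is for interoperability only:

```
package legacy_example

import "core:fmt"

// The `legacy` path component is new in this commit.
import "vendor:botan/legacy/md5"

main :: proc() {
    // MD5 is broken for security purposes; use it only to
    // interoperate with legacy systems.
    digest := md5.hash("foo")
    fmt.println(digest[:]) // 16-byte digest
}
```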

+ 19 - 22
vendor/botan/keccak/keccak.odin → vendor/botan/legacy/keccak/keccak.odin

@@ -14,7 +14,7 @@ package vendor_keccak
 import "core:os"
 import "core:io"
 
-import botan "../bindings"
+import botan "../../bindings"
 
 /*
     High level API
@@ -32,11 +32,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -52,31 +51,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 }
 
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
     hash: [DIGEST_SIZE_512]byte
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0)
+    ctx: Context
+    init(&ctx)
     buf := make([]byte, 512)
     buf := make([]byte, 512)
     defer delete(buf)
     defer delete(buf)
     i := 1
     i := 1
     for i > 0 {
     for i > 0 {
         i, _ = io.read(s, buf)
         i, _ = io.read(s, buf)
         if i > 0 {
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 }
 
 
 // hash_file_512 will read the file provided by the given handle
 // hash_file_512 will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash_512 :: proc {
     Low level API
     Low level API
 */
 */
 
 
-Keccak_Context :: botan.hash_t
+Context :: botan.hash_t
 
 
-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
     botan.hash_init(ctx, botan.HASH_KECCAK_512, 0)
     botan.hash_init(ctx, botan.HASH_KECCAK_512, 0)
 }
 }
 
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 }
 
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
     botan.hash_destroy(ctx^)
 }
 }

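The move also swaps the direct `botan.hash_t` plumbing in the high-level procs for the package's own `Context`/`init`/`update`/`final`, so incremental hashing reads the same as the one-shot helpers. A minimal sketch under that API (the import path is inferred from the new location, not shown in the diff):

    package keccak_example

    import keccak "vendor:botan/legacy/keccak"

    main :: proc() {
        ctx: keccak.Context
        digest: [keccak.DIGEST_SIZE_512]byte

        keccak.init(&ctx) // binds the context to HASH_KECCAK_512
        msg := "hello world"
        keccak.update(&ctx, transmute([]byte)msg)
        keccak.final(&ctx, digest[:]) // finalizes and destroys the context
    }

Note that `final` calls `botan.hash_destroy`, so a context cannot be updated or finalized again afterwards.
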
+ 19 - 22
vendor/botan/md5/md5.odin → vendor/botan/legacy/md5/md5.odin

@@ -14,7 +14,7 @@ package vendor_md5
 import "core:os"
 import "core:os"
 import "core:io"
 import "core:io"
 
 
-import botan "../bindings"
+import botan "../../bindings"
 
 
 /*
 /*
     High level API
     High level API
@@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 // computed hash
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
     hash: [DIGEST_SIZE]byte
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD5, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
     return hash
 }
 }
 
 
@@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD5, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 }
 
 
 // hash_stream will read the stream in chunks and compute a
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
     hash: [DIGEST_SIZE]byte
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD5, 0)
+    ctx: Context
+    init(&ctx)
     buf := make([]byte, 512)
     buf := make([]byte, 512)
     defer delete(buf)
     defer delete(buf)
     i := 1
     i := 1
     for i > 0 {
     for i > 0 {
         i, _ = io.read(s, buf)
         i, _ = io.read(s, buf)
         if i > 0 {
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 }
 
 
 // hash_file will read the file provided by the given handle
 // hash_file will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash :: proc {
     Low level API
     Low level API
 */
 */
 
 
-Md5_Context :: botan.hash_t
+Context :: botan.hash_t
 
 
-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
     botan.hash_init(ctx, botan.HASH_MD5, 0)
     botan.hash_init(ctx, botan.HASH_MD5, 0)
 }
 }
 
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 }
 
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
     botan.hash_destroy(ctx^)
 }
 }

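The file-hashing entry point is untouched by this refactor: `hash_file` either wraps the handle in a stream (feeding the 512-byte chunk loop above) or reads the whole file when `load_at_once` is set. A usage sketch, with the import alias and file name as placeholders:

    package md5_file_example

    import "core:fmt"
    import "core:os"
    import md5 "vendor:botan/legacy/md5"

    main :: proc() {
        fd, err := os.open("data.bin") // placeholder path
        if err != os.ERROR_NONE {
            return
        }
        defer os.close(fd)

        // The default load_at_once = false streams the handle in chunks.
        if digest, ok := md5.hash_file(fd); ok {
            fmt.println(digest)
        }
    }
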
+ 19 - 22
vendor/botan/sha1/sha1.odin → vendor/botan/legacy/sha1/sha1.odin

@@ -14,7 +14,7 @@ package vendor_sha1
 import "core:os"
 import "core:os"
 import "core:io"
 import "core:io"
 
 
-import botan "../bindings"
+import botan "../../bindings"
 
 
 /*
 /*
     High level API
     High level API
@@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 // computed hash
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
     hash: [DIGEST_SIZE]byte
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA1, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
     return hash
 }
 }
 
 
@@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA1, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 }
 
 
 // hash_stream will read the stream in chunks and compute a
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
     hash: [DIGEST_SIZE]byte
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA1, 0)
+    ctx: Context
+    init(&ctx)
     buf := make([]byte, 512)
     buf := make([]byte, 512)
     defer delete(buf)
     defer delete(buf)
     i := 1
     i := 1
     for i > 0 {
     for i > 0 {
         i, _ = io.read(s, buf)
         i, _ = io.read(s, buf)
         if i > 0 {
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 }
 
 
 // hash_file will read the file provided by the given handle
 // hash_file will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash :: proc {
     Low level API
     Low level API
 */
 */
 
 
-Sha1_Context :: botan.hash_t
+Context :: botan.hash_t
 
 
-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
     botan.hash_init(ctx, botan.HASH_SHA1, 0)
     botan.hash_init(ctx, botan.HASH_SHA1, 0)
 }
 }
 
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 }
 
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
     botan.hash_destroy(ctx^)
 }
 }

+ 0 - 121
vendor/botan/md4/md4.odin

@@ -1,121 +0,0 @@
-package vendor_md4
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog:  Initial implementation.
-
-    Interface for the MD4 hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 16
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD4, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD4, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_MD4, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-Md4_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t) {
-    botan.hash_init(ctx, botan.HASH_MD4, 0)
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}

+ 0 - 121
vendor/botan/ripemd/ripemd.odin

@@ -1,121 +0,0 @@
-package vendor_ripemd
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the RIPEMD-160 hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_160 :: 20
-
-// hash_string_160 will hash the given input and return the
-// computed hash
-hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160(transmute([]byte)(data))
-}
-
-// hash_bytes_160 will hash the given input and return the
-// computed hash
-hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_160 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_160 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_160 will read the file provided by the given handle
-// and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160 :: proc {
-    hash_stream_160,
-    hash_file_160,
-    hash_bytes_160,
-    hash_string_160,
-    hash_bytes_to_buffer_160,
-    hash_string_to_buffer_160,
-}
-
-/*
-    Low level API
-*/
-
-Ripemd160_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t) {
-    botan.hash_init(ctx, botan.HASH_RIPEMD_160, 0)
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}

+ 60 - 72
vendor/botan/sha2/sha2.odin

@@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_224, 0)
+    ctx: Context
+    init(&ctx, hash_size = 224)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_224 will read the file provided by the given handle
@@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_256, 0)
+    ctx: Context
+    init(&ctx, hash_size = 256)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
@@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_384, 0)
+    ctx: Context
+    init(&ctx, hash_size = 384)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_384 will read the file provided by the given handle
@@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA_512, 0)
+    ctx: Context
+    init(&ctx, hash_size = 512)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_512 will read the file provided by the given handle
@@ -345,9 +333,9 @@ hash_512 :: proc {
     Low level API
 */
 
-Sha2_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
     switch hash_size {
         case 224: botan.hash_init(ctx, botan.HASH_SHA_224, 0)
         case 256: botan.hash_init(ctx, botan.HASH_SHA_256, 0)
@@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
     }
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }

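Collapsing `Sha2_Context` into `Context` and keying `init` on `hash_size` means digest selection happens once, and the 224/256/384/512 helpers differ only in that argument (the SHA-3 wrapper below is refactored identically). A sketch of SHA-256 through the low-level API, with the import alias assumed:

    package sha2_example

    import sha2 "vendor:botan/sha2"

    sha256_digest :: proc(data: []byte) -> [sha2.DIGEST_SIZE_256]byte {
        digest: [sha2.DIGEST_SIZE_256]byte
        ctx: sha2.Context
        sha2.init(&ctx, hash_size = 256) // 224, 256, 384, or 512; default 512
        sha2.update(&ctx, data)
        sha2.final(&ctx, digest[:])
        return digest
    }
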
+ 60 - 72
vendor/botan/sha3/sha3.odin

@@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
+    ctx: Context
+    init(&ctx, hash_size = 224)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_224 will read the file provided by the given handle
@@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
+    ctx: Context
+    init(&ctx, hash_size = 256)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
@@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
+    ctx: Context
+    init(&ctx, hash_size = 384)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_384 will read the file provided by the given handle
@@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
+    ctx: Context
+    init(&ctx, hash_size = 512)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_512 will read the file provided by the given handle
@@ -345,9 +333,9 @@ hash_512 :: proc {
     Low level API
 */
 
-Sha3_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
     switch hash_size {
         case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0)
        case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0)
@@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
     }
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }

+ 32 - 38
vendor/botan/shake/shake.odin

@@ -33,11 +33,10 @@ hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
 // computed hash
 hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
     hash: [DIGEST_SIZE_128]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 128)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -53,31 +52,29 @@ hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 128)
+    update(&ctx, data)
+    final(&ctx, hash)
 }
 
 // hash_stream_128 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
     hash: [DIGEST_SIZE_128]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0)
+    ctx: Context
+    init(&ctx, hash_size = 128)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_128 will read the file provided by the given handle
@@ -112,11 +109,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -132,31 +128,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash)
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0)
+    ctx: Context
+    init(&ctx, hash_size = 256)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
@@ -185,20 +179,20 @@ hash_256 :: proc {
     Low level API
 */
 
-Shake_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 256) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 256) {
     switch hash_size {
         case 128: botan.hash_init(ctx, botan.HASH_SHAKE_128, 0)
         case 256: botan.hash_init(ctx, botan.HASH_SHAKE_256, 0)
     }
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }

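The SHAKE wrapper gets the same shape, with `hash_size` choosing SHAKE-128 or SHAKE-256 (the default) at init time. A short sketch, with the import alias assumed:

    package shake_example

    import shake "vendor:botan/shake"

    shake128_digest :: proc(data: []byte) -> [shake.DIGEST_SIZE_128]byte {
        digest: [shake.DIGEST_SIZE_128]byte
        ctx: shake.Context
        shake.init(&ctx, hash_size = 128) // 128 or 256 (default)
        shake.update(&ctx, data)
        shake.final(&ctx, digest[:])
        return digest
    }
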
+ 4 - 4
vendor/botan/siphash/siphash.odin

@@ -14,7 +14,7 @@ package vendor_siphash
 */
 
 import "core:crypto"
-import "core:crypto/util"
+import "core:encoding/endian"
 
 import botan "../bindings"
 
@@ -35,7 +35,7 @@ sum_bytes_1_3 :: proc (msg, key: []byte) -> u64 {
     init(&ctx, key[:], 1, 3)
     update(&ctx, msg[:])
     final(&ctx, dst[:])
-    return util.U64_LE(dst[:])
+    return endian.unchecked_get_u64le(dst[:])
 }
 
 // sum_string_to_buffer_1_3 will hash the given message with the key and write
@@ -94,7 +94,7 @@ sum_bytes_2_4 :: proc (msg, key: []byte) -> u64 {
     init(&ctx, key[:])
     update(&ctx, msg[:])
     final(&ctx, dst[:])
-    return util.U64_LE(dst[:])
+    return endian.unchecked_get_u64le(dst[:])
 }
 
 // sum_string_to_buffer_2_4 will hash the given message with the key and write
@@ -172,7 +172,7 @@ sum_bytes_4_8 :: proc (msg, key: []byte) -> u64 {
     init(&ctx, key[:], 4, 8)
     update(&ctx, msg[:])
     final(&ctx, dst[:])
-    return util.U64_LE(dst[:])
+    return endian.unchecked_get_u64le(dst[:])
 }
 
 // sum_string_to_buffer_4_8 will hash the given message with the key and write
+ 0 - 286
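The SipHash change is narrower: the deleted `core:crypto/util` helper for reading the 64-bit little-endian tag is replaced by `core:encoding/endian`. A sketch of the equivalent decode, assuming the caller guarantees an 8-byte tag buffer:

    package siphash_example

    import "core:encoding/endian"

    // Equivalent of the removed util.U64_LE(dst[:]): interpret the first
    // 8 bytes of the tag as a little-endian u64.
    tag_as_u64 :: proc(dst: []byte) -> u64 {
        return endian.unchecked_get_u64le(dst)
    }
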
vendor/botan/skein512/skein512.odin

@@ -1,286 +0,0 @@
-package vendor_skein512
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the SKEIN-512 hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-import "core:strings"
-import "core:fmt"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_512 :: 64
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-// hash_string_slice will hash the given input and return the
-// computed hash
-hash_string_slice :: proc(data: string, bit_size: int, allocator := context.allocator) -> []byte {
-    return hash_bytes_slice(transmute([]byte)(data), bit_size, allocator)
-}
-
-// hash_bytes_slice will hash the given input and return the
-// computed hash
-hash_bytes_slice :: proc(data: []byte, bit_size: int, allocator := context.allocator) -> []byte {
-    hash := make([]byte, bit_size, allocator)
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_slice will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_slice :: proc(data: string, hash: []byte, bit_size: int, allocator := context.allocator) {
-    hash_bytes_to_buffer_slice(transmute([]byte)(data), hash, bit_size, allocator)
-}
-
-// hash_bytes_to_buffer_slice will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_slice :: proc(data, hash: []byte, bit_size: int, allocator := context.allocator) {
-    assert(len(hash) >= bit_size, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_slice will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_slice :: proc(s: io.Stream, bit_size: int, allocator := context.allocator) -> ([]byte, bool) {
-    hash := make([]byte, bit_size, allocator)
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_slice will read the file provided by the given handle
-// and compute a hash
-hash_file_slice :: proc(hd: os.Handle, bit_size: int, load_at_once := false, allocator := context.allocator) -> ([]byte, bool) {
-    if !load_at_once {
-        return hash_stream_slice(os.stream_from_handle(hd), bit_size, allocator)
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_slice(buf[:], bit_size, allocator), ok
-        }
-    }
-    return nil, false
-}
-
-hash_slice :: proc {
-    hash_stream_slice,
-    hash_file_slice,
-    hash_bytes_slice,
-    hash_string_slice,
-    hash_bytes_to_buffer_slice,
-    hash_string_to_buffer_slice,
-}
-
-/*
-    Low level API
-*/
-
-Skein512_Context :: botan.hash_t
-
-init :: proc(ctx: ^botan.hash_t, hash_size := 512) {
-    switch hash_size {
-        case 256:  botan.hash_init(ctx, botan.HASH_SKEIN_512_256,  0)
-        case 512:  botan.hash_init(ctx, botan.HASH_SKEIN_512_512,  0)
-        case:      botan.hash_init(ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", hash_size)), 0)
-    }
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}
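
The skein512 package above is removed outright, so callers that still need Skein-512 have to go through the raw Botan bindings themselves. Below is a minimal migration sketch mirroring the deleted hash_bytes_slice helper. Note that, despite its name, the old bit_size parameter was actually a byte count: the deleted code multiplies it by 8 when building the algorithm-name string, and Botan resolves the output length from that "Skein-512(bits)" name. The vendor:botan/bindings import path is an assumption; adjust it to wherever the bindings live in your build.

package skein512_migration

import "core:fmt"
import "core:strings"

import botan "vendor:botan/bindings" // assumed import path

// skein512_slice reproduces the deleted hash_bytes_slice helper.
// num_bytes is the desired digest length in bytes (the deleted helper
// called this bit_size, but it was in fact a byte count).
skein512_slice :: proc(data: []byte, num_bytes: int, allocator := context.allocator) -> []byte {
    hash := make([]byte, num_bytes, allocator)
    ctx: botan.hash_t
    // Botan derives the output size from the algorithm name, in bits.
    botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", num_bytes * 8)), 0)
    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
    botan.hash_final(ctx, &hash[0])
    botan.hash_destroy(ctx)
    return hash
}
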

+ 18 - 21
vendor/botan/sm3/sm3.odin

@@ -32,11 +32,10 @@ hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
 // computed hash
 hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SM3, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -52,31 +51,29 @@ hash_string_to_buffer :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SM3, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
 hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
     hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SM3, 0)
+    ctx: Context
+    init(&ctx)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file will read the file provided by the given handle
@@ -105,17 +102,17 @@ hash :: proc {
     Low level API
 */
 
-Sm3_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t) {
+init :: proc "contextless" (ctx: ^Context) {
     botan.hash_init(ctx, botan.HASH_SM3, 0)
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }
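
The pattern visible in the sm3 rewrite above: the context type is renamed from Sm3_Context to the package-qualified Context, and the high-level helpers now delegate to init/update/final instead of repeating the raw binding calls. A short usage sketch under the new API; the import alias and path are illustrative, not taken from the diff.

package sm3_usage

import sm3 "vendor:botan/sm3" // assumed import path

main :: proc() {
    msg := "The quick brown fox jumps over the lazy dog"

    // One-shot helper.
    digest := sm3.hash_string(msg)

    // Equivalent incremental computation via the low-level API.
    ctx: sm3.Context
    sm3.init(&ctx)
    sm3.update(&ctx, transmute([]byte)msg)
    incremental: [sm3.DIGEST_SIZE]byte
    sm3.final(&ctx, incremental[:])

    assert(digest == incremental)
}
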

+ 0 - 204
vendor/botan/streebog/streebog.odin

@@ -1,204 +0,0 @@
-package vendor_streebog
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog:  Initial implementation.
-
-    Interface for the Streebog hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_256 :: 32
-DIGEST_SIZE_512 :: 64
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-    return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-    hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-    if !load_at_once {
-        return hash_stream_256(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_256(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-    hash_stream_256,
-    hash_file_256,
-    hash_bytes_256,
-    hash_string_256,
-    hash_bytes_to_buffer_256,
-    hash_string_to_buffer_256,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-    return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-    hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-    if !load_at_once {
-        return hash_stream_512(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_512(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-    hash_stream_512,
-    hash_file_512,
-    hash_bytes_512,
-    hash_string_512,
-    hash_bytes_to_buffer_512,
-    hash_string_to_buffer_512,
-}
-
-/*
-    Low level API
-*/
-
-Streebog_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
-    switch hash_size {
-        case 256: botan.hash_init(ctx, botan.HASH_STREEBOG_256, 0)
-        case 512: botan.hash_init(ctx, botan.HASH_STREEBOG_512, 0)
-    }
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}
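
With the whole streebog package gone, and its HASH_STREEBOG_* constants dropped from vendor/botan/bindings per the file list at the top, the closest replacement is to name the algorithm directly. A minimal sketch, assuming "Streebog-256" is the algorithm name Botan expects (the removed constant presumably mapped to it) and the import path shown:

package streebog_migration

import botan "vendor:botan/bindings" // assumed import path

// Re-declared locally because the binding constant is removed in this
// commit; "Streebog-256" as Botan's algorithm name is an assumption.
HASH_STREEBOG_256 :: "Streebog-256"

streebog_256 :: proc(data: []byte) -> [32]byte {
    hash: [32]byte
    ctx: botan.hash_t
    botan.hash_init(&ctx, HASH_STREEBOG_256, 0)
    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
    botan.hash_final(ctx, &hash[0])
    botan.hash_destroy(ctx)
    return hash
}
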

+ 0 - 285
vendor/botan/tiger/tiger.odin

@@ -1,285 +0,0 @@
-package vendor_tiger
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog:  Initial implementation.
-
-    Interface for the Tiger hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_160 :: 20
-DIGEST_SIZE_192 :: 24
-
-// hash_string_128 will hash the given input and return the
-// computed hash
-hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-    return hash_bytes_128(transmute([]byte)(data))
-}
-
-// hash_bytes_128 will hash the given input and return the
-// computed hash
-hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_128 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_128 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-    hash: [DIGEST_SIZE_128]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_128 will read the file provided by the given handle
-// and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-    if !load_at_once {
-        return hash_stream_128(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_128(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128 :: proc {
-    hash_stream_128,
-    hash_file_128,
-    hash_bytes_128,
-    hash_string_128,
-    hash_bytes_to_buffer_128,
-    hash_string_to_buffer_128,
-}
-
-// hash_string_160 will hash the given input and return the
-// computed hash
-hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
-    return hash_bytes_160(transmute([]byte)(data))
-}
-
-// hash_bytes_160 will hash the given input and return the
-// computed hash
-hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_160 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_160 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_160 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
-    hash: [DIGEST_SIZE_160]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_160 will read the file provided by the given handle
-// and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
-    if !load_at_once {
-        return hash_stream_160(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_160(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_160]byte{}, false
-}
-
-hash_160 :: proc {
-    hash_stream_160,
-    hash_file_160,
-    hash_bytes_160,
-    hash_string_160,
-    hash_bytes_to_buffer_160,
-    hash_string_to_buffer_160,
-}
-
-// hash_string_192 will hash the given input and return the
-// computed hash
-hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
-    return hash_bytes_192(transmute([]byte)(data))
-}
-
-// hash_bytes_192 will hash the given input and return the
-// computed hash
-hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer_192 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_192 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream_192 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
-    hash: [DIGEST_SIZE_192]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file_192 will read the file provided by the given handle
-// and compute a hash
-hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
-    if !load_at_once {
-        return hash_stream_192(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes_192(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE_192]byte{}, false
-}
-
-hash_192 :: proc {
-    hash_stream_192,
-    hash_file_192,
-    hash_bytes_192,
-    hash_string_192,
-    hash_bytes_to_buffer_192,
-    hash_string_to_buffer_192,
-}
-
-/*
-    Low level API
-*/
-
-Tiger_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 192) {
-    switch hash_size {
-        case 128: botan.hash_init(ctx, botan.HASH_TIGER_128, 0)
-        case 160: botan.hash_init(ctx, botan.HASH_TIGER_160, 0)
-        case 192: botan.hash_init(ctx, botan.HASH_TIGER_192, 0)
-    }
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}
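
One wart worth noting in the deleted Tiger code: init has no default branch, so an unsupported hash_size silently left the context uninitialized and the subsequent update/final calls operated on garbage. If the helpers are ever resurrected, a checked variant is straightforward. A sketch of a hypothetical helper, assuming the botan.HASH_TIGER_* constants are still visible to the caller (they are dropped from the bindings in this commit, so they may need re-declaring):

package tiger_migration

import botan "vendor:botan/bindings" // assumed import path

// init_checked reports failure instead of leaving ctx unset.
init_checked :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 192) -> bool {
    switch hash_size {
    case 128: botan.hash_init(ctx, botan.HASH_TIGER_128, 0)
    case 160: botan.hash_init(ctx, botan.HASH_TIGER_160, 0)
    case 192: botan.hash_init(ctx, botan.HASH_TIGER_192, 0)
    case:     return false // unsupported size: report it rather than proceed
    }
    return true
}
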

+ 0 - 121
vendor/botan/whirlpool/whirlpool.odin

@@ -1,121 +0,0 @@
-package vendor_whirlpool
-
-/*
-    Copyright 2021 zhibog
-    Made available under the BSD-3 license.
-
-    List of contributors:
-        zhibog:  Initial implementation.
-
-    Interface for the WHIRLPOOL hashing algorithm.
-    The hash will be computed via bindings to the Botan crypto library
-*/
-
-import "core:os"
-import "core:io"
-
-import botan "../bindings"
-
-/*
-    High level API
-*/
-
-DIGEST_SIZE :: 64
-
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
-    return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-    hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-    hash: [DIGEST_SIZE]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
-    buf := make([]byte, 512)
-    defer delete(buf)
-    i := 1
-    for i > 0 {
-        i, _ = io.read(s, buf)
-        if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
-    }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
-}
-
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-    if !load_at_once {
-        return hash_stream(os.stream_from_handle(hd))
-    } else {
-        if buf, ok := os.read_entire_file(hd); ok {
-            return hash_bytes(buf[:]), ok
-        }
-    }
-    return [DIGEST_SIZE]byte{}, false
-}
-
-hash :: proc {
-    hash_stream,
-    hash_file,
-    hash_bytes,
-    hash_string,
-    hash_bytes_to_buffer,
-    hash_string_to_buffer,
-}
-
-/*
-    Low level API
-*/
-
-Whirlpool_Context :: botan.hash_t
-
-init :: proc "contextless" (ctx: ^botan.hash_t) {
-    botan.hash_init(ctx, botan.HASH_WHIRLPOOL, 0)
-}
-
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
-    botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
-}
-
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
-    botan.hash_final(ctx^, &hash[0])
-    botan.hash_destroy(ctx^)
-}
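
All of these wrappers share the same 512-byte read loop for streams. Since the whirlpool package is also removed wholesale, here is that pattern expressed once against the raw bindings. A minimal sketch, assuming the import path and that "Whirlpool" is the algorithm name Botan expects (the deleted HASH_WHIRLPOOL constant presumably mapped to it):

package whirlpool_migration

import "core:io"

import botan "vendor:botan/bindings" // assumed import path

DIGEST_SIZE :: 64

// Stream-hashing pattern shared by all the deleted hash_stream helpers:
// read fixed-size chunks until EOF, feeding each chunk to the hash.
hash_stream :: proc(s: io.Stream) -> (hash: [DIGEST_SIZE]byte, ok: bool) {
    ctx: botan.hash_t
    botan.hash_init(&ctx, "Whirlpool", 0) // algorithm name: hedged assumption
    buf := make([]byte, 512)
    defer delete(buf)
    for {
        n, _ := io.read(s, buf)
        if n <= 0 do break
        botan.hash_update(ctx, &buf[0], uint(n))
    }
    botan.hash_final(ctx, &hash[0])
    botan.hash_destroy(ctx)
    return hash, true
}
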

Some files were not shown because too many files changed in this diff