@@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }

@@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }

 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
+    ctx: Context
+    init(&ctx, hash_size = 224)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }

 // hash_file_224 will read the file provided by the given handle
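
With this change the one-shot helpers are thin wrappers over the low-level Context/init/update/final API defined at the end of this file. A minimal sketch of both call styles, for reference while reviewing; the _example_hash_224 proc and the core:fmt import are illustrative and not part of the patch:

import "core:fmt"

_example_hash_224 :: proc() {
    msg := "The quick brown fox jumps over the lazy dog"

    // one-shot helper: returns the digest by value
    digest := hash_string_224(msg)

    // the same computation spelled out with the low-level API
    manual: [DIGEST_SIZE_224]byte
    ctx: Context
    init(&ctx, hash_size = 224)
    update(&ctx, transmute([]byte)msg)
    final(&ctx, manual[:]) // final also destroys the botan context

    assert(digest == manual)
    fmt.printf("%x\n", digest)
}
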
@@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }

@@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }

 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
+    ctx: Context
+    init(&ctx, hash_size = 256)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }

 // hash_file_256 will read the file provided by the given handle
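
The _to_buffer variants keep their previous contract: the caller owns the destination slice and the assert rejects anything smaller than the digest. A short sketch, with illustrative local names:

_example_to_buffer_256 :: proc() {
    data := []byte{1, 2, 3}
    dst := make([]byte, DIGEST_SIZE_256) // must be at least DIGEST_SIZE_256 bytes
    defer delete(dst)
    hash_bytes_to_buffer_256(data, dst)
}
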
@@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }

@@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }

 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
+    ctx: Context
+    init(&ctx, hash_size = 384)
    buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }

 // hash_file_384 will read the file provided by the given handle
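
The hash_stream variants pull the input through an io.Stream in 512-byte chunks, so large inputs never have to be fully resident in memory. A hedged sketch of a file-based caller; it assumes os.stream_from_handle from core:os, and the proc name is illustrative:

import "core:os"

_example_stream_384 :: proc(path: string) -> ([DIGEST_SIZE_384]byte, bool) {
    fd, err := os.open(path)
    if err != os.ERROR_NONE {
        return [DIGEST_SIZE_384]byte{}, false
    }
    defer os.close(fd)
    // wrap the handle in an io.Stream and hash it chunk by chunk
    return hash_stream_384(os.stream_from_handle(fd))
}
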
@@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }

@@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }

 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
+    ctx: Context
+    init(&ctx, hash_size = 512)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        }
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true
+    final(&ctx, hash[:])
+    return hash, true
 }

 // hash_file_512 will read the file provided by the given handle
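
The next hunk only retypes the low-level API, but its header shows the hash_512 proc group, which dispatches the generic name across the concrete variants. Assuming the group lists the string and bytes procs (as the rest of the file suggests), callers can stay on the generic name:

_example_proc_group :: proc() {
    // the argument type selects the overload within the proc group
    s := "abc"
    a := hash_512(s)                  // resolves to hash_string_512
    b := hash_512(transmute([]byte)s) // resolves to hash_bytes_512
    assert(a == b)
}
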
@@ -345,9 +333,9 @@ hash_512 :: proc {
 Low level API
 */

-Sha3_Context :: botan.hash_t
+Context :: botan.hash_t

-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
     switch hash_size {
     case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0)
     case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0)

@@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
     }
 }

-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }

-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }
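
Note that final both extracts the digest and destroys the underlying botan object, so a Context is single-use: after final it must be re-initialized with init before hashing again. A minimal incremental sketch, with an illustrative proc name:

_example_incremental :: proc() {
    ctx: Context
    init(&ctx, hash_size = 256) // hash_size defaults to 512 when omitted

    // feed the message in as many pieces as needed
    part1, part2 := "hello, ", "world"
    update(&ctx, transmute([]byte)part1)
    update(&ctx, transmute([]byte)part2)

    digest: [DIGEST_SIZE_256]byte
    final(&ctx, digest[:]) // writes the digest and destroys ctx

    // ctx is now destroyed; call init again before reuse
}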