
core/crypto/sha3: API cleanup

- sha3.Sha3_Context -> sha3.Context
Yawning Angel, 1 year ago
parent
commit b8f9deb3d8
2 changed files with 120 additions and 124 deletions
  1. core/crypto/sha3/sha3.odin (+60 -52)
  2. vendor/botan/sha3/sha3.odin (+60 -72)
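
For callers, the visible change is the rename of the exported context type: `sha3.Sha3_Context` becomes `sha3.Context`, with the one-shot convenience procedures left intact. A minimal sketch against the new names (the input string is illustrative, not part of the commit):

    package main

    import "core:crypto/sha3"
    import "core:fmt"

    main :: proc() {
        // One-shot helper; internally it now declares `ctx: Context`
        // instead of `ctx: _sha3.Sha3_Context`, as the hunks below show.
        digest := sha3.hash_string_256("hello world")
        fmt.println(digest)
    }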

core/crypto/sha3/sha3.odin (+60 -52)

@@ -35,11 +35,11 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
 	hash: [DIGEST_SIZE_224]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_224
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -54,30 +54,32 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_224
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
 	hash: [DIGEST_SIZE_224]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_224
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.final(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -113,11 +115,11 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -132,30 +134,32 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
 	hash: [DIGEST_SIZE_256]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_256
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.final(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -191,11 +195,11 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
 	hash: [DIGEST_SIZE_384]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_384
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -210,30 +214,32 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_384
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
 	hash: [DIGEST_SIZE_384]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_384
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.final(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -269,11 +275,11 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
 	hash: [DIGEST_SIZE_512]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_512
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash[:])
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash[:])
 	return hash
 }
 
@@ -288,30 +294,32 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // computed hash into the second parameter.
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_512
-	_sha3.init(&ctx)
-	_sha3.update(&ctx, data)
-	_sha3.final(&ctx, hash)
+	init(&ctx)
+	update(&ctx, data)
+	final(&ctx, hash)
 }
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
 	hash: [DIGEST_SIZE_512]byte
-	ctx: _sha3.Sha3_Context
+	ctx: Context
 	ctx.mdlen = DIGEST_SIZE_512
-	_sha3.init(&ctx)
+	init(&ctx)
+
 	buf := make([]byte, 512)
 	defer delete(buf)
+
 	read := 1
 	for read > 0 {
 		read, _ = io.read(s, buf)
 		if read > 0 {
-			_sha3.update(&ctx, buf[:read])
+			update(&ctx, buf[:read])
 		}
 	}
-	_sha3.final(&ctx, hash[:])
+	final(&ctx, hash[:])
 	return hash, true
 }
 
@@ -341,16 +349,16 @@ hash_512 :: proc {
     Low level API
 */
 
-Sha3_Context :: _sha3.Sha3_Context
+Context :: _sha3.Sha3_Context
 
-init :: proc(ctx: ^_sha3.Sha3_Context) {
+init :: proc(ctx: ^Context) {
 	_sha3.init(ctx)
 }
 
-update :: proc(ctx: ^_sha3.Sha3_Context, data: []byte) {
+update :: proc(ctx: ^Context, data: []byte) {
 	_sha3.update(ctx, data)
 }
 
-final :: proc(ctx: ^_sha3.Sha3_Context, hash: []byte) {
+final :: proc(ctx: ^Context, hash: []byte) {
 	_sha3.final(ctx, hash)
 }
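
With these wrappers, callers can drive the low-level API without touching the `_sha3` internals: declare a `sha3.Context`, set `mdlen` to the desired digest size, and call `init`/`update`/`final`, mirroring what the one-shot procedures above do internally. A minimal streaming sketch under those assumptions (the message value is illustrative):

    package main

    import "core:crypto/sha3"

    main :: proc() {
        msg := "abc"

        ctx: sha3.Context // formerly sha3.Sha3_Context
        ctx.mdlen = sha3.DIGEST_SIZE_256
        sha3.init(&ctx)
        sha3.update(&ctx, transmute([]byte)msg)

        digest: [sha3.DIGEST_SIZE_256]byte
        sha3.final(&ctx, digest[:])
    }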

vendor/botan/sha3/sha3.odin (+60 -72)

@@ -35,11 +35,10 @@ hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
 // computed hash
 hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -55,31 +54,29 @@ hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 224)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_224 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
     hash: [DIGEST_SIZE_224]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_224, 0)
+    ctx: Context
+    init(&ctx, hash_size = 224)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_224 will read the file provided by the given handle
@@ -114,11 +111,10 @@ hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
 // computed hash
 hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -134,31 +130,29 @@ hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 256)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_256 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
     hash: [DIGEST_SIZE_256]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_256, 0)
+    ctx: Context
+    init(&ctx, hash_size = 256)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_256 will read the file provided by the given handle
@@ -193,11 +187,10 @@ hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
 // computed hash
 hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -213,31 +206,29 @@ hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 384)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_384 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
     hash: [DIGEST_SIZE_384]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_384, 0)
+    ctx: Context
+    init(&ctx, hash_size = 384)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_384 will read the file provided by the given handle
@@ -272,11 +263,10 @@ hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
 // computed hash
 hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
     return hash
 }
 
@@ -292,31 +282,29 @@ hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
 // It requires that the destination buffer is at least as big as the digest size
 hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
     assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
-    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
+    ctx: Context
+    init(&ctx, hash_size = 512)
+    update(&ctx, data)
+    final(&ctx, hash[:])
 }
 
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
 hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
     hash: [DIGEST_SIZE_512]byte
-    ctx: botan.hash_t
-    botan.hash_init(&ctx, botan.HASH_SHA3_512, 0)
+    ctx: Context
+    init(&ctx, hash_size = 512)
     buf := make([]byte, 512)
     defer delete(buf)
     i := 1
     for i > 0 {
         i, _ = io.read(s, buf)
         if i > 0 {
-            botan.hash_update(ctx, len(buf) == 0 ? nil : &buf[0], uint(i))
-        } 
+            update(&ctx, buf[:i])
+        }
     }
-    botan.hash_final(ctx, &hash[0])
-    botan.hash_destroy(ctx)
-    return hash, true 
+    final(&ctx, hash[:])
+    return hash, true
 }
 
 // hash_file_512 will read the file provided by the given handle
@@ -345,9 +333,9 @@ hash_512 :: proc {
     Low level API
 */
 
-Sha3_Context :: botan.hash_t
+Context :: botan.hash_t
 
-init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
+init :: proc "contextless" (ctx: ^Context, hash_size := 512) {
     switch hash_size {
         case 224: botan.hash_init(ctx, botan.HASH_SHA3_224, 0)
         case 256: botan.hash_init(ctx, botan.HASH_SHA3_256, 0)
@@ -356,11 +344,11 @@ init :: proc "contextless" (ctx: ^botan.hash_t, hash_size := 512) {
     }
 }
 
-update :: proc "contextless" (ctx: ^botan.hash_t, data: []byte) {
+update :: proc "contextless" (ctx: ^Context, data: []byte) {
     botan.hash_update(ctx^, len(data) == 0 ? nil : &data[0], uint(len(data)))
 }
 
-final :: proc "contextless" (ctx: ^botan.hash_t, hash: []byte) {
+final :: proc "contextless" (ctx: ^Context, hash: []byte) {
     botan.hash_final(ctx^, &hash[0])
     botan.hash_destroy(ctx^)
 }
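
In the Botan-backed package, the same `Context`/`init`/`update`/`final` surface now wraps `botan.hash_t`: `init` selects the algorithm from its `hash_size` parameter (defaulting to SHA3-512), and `final` both finalizes the digest and destroys the handle, so no separate `hash_destroy` call is needed. A hedged usage sketch, assuming the package is imported as `vendor:botan/sha3` (the message value is illustrative):

    package main

    import "vendor:botan/sha3"

    main :: proc() {
        msg := "abc"

        ctx: sha3.Context // an alias for botan.hash_t
        sha3.init(&ctx, hash_size = 256) // omit to get the SHA3-512 default
        sha3.update(&ctx, transmute([]byte)msg)

        digest: [sha3.DIGEST_SIZE_256]byte
        sha3.final(&ctx, digest[:]) // also releases the Botan handle
    }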