123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377 |
- package keccak
- /*
- Copyright 2021 zhibog
- Made available under the BSD-3 license.
- List of contributors:
- zhibog, dotbmp: Initial implementation.
- Interface for the Keccak hashing algorithm.
- This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
- */
- import "core:io"
- import "core:os"
- import "../../_sha3"
- /*
- High level API
- */
// Digest sizes, in bytes, for each supported Keccak variant.
DIGEST_SIZE_224 :: 28 // Keccak-224
DIGEST_SIZE_256 :: 32 // Keccak-256
DIGEST_SIZE_384 :: 48 // Keccak-384
DIGEST_SIZE_512 :: 64 // Keccak-512
// hash_string_224 computes the Keccak-224 digest of the given
// string and returns it.
hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
	input := transmute([]byte)(data)
	return hash_bytes_224(input)
}
// hash_bytes_224 computes the Keccak-224 digest of the given
// bytes and returns it.
hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}
// hash_string_to_buffer_224 computes the Keccak-224 digest of the
// given string and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_224 bytes.
hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
	input := transmute([]byte)(data)
	hash_bytes_to_buffer_224(input, hash)
}
// hash_bytes_to_buffer_224 computes the Keccak-224 digest of the
// given bytes and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_224 bytes;
// this is now enforced with an explicit assertion.
hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_224, "Destination buffer is smaller than the digest size")
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}
// hash_stream_224 reads the stream in 512-byte chunks and computes
// the Keccak-224 digest of its contents.
// NOTE: read errors are not surfaced — they merely end the chunk
// loop via read <= 0, and `true` is always returned, matching the
// original behavior of this API.
hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
	hash: [DIGEST_SIZE_224]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_224
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	for {
		read, _ := io.read(s, buf)
		if read <= 0 {
			break
		}
		update(&ctx, buf[:read])
	}
	final(&ctx, hash[:])
	return hash, true
}
// hash_file_224 reads the file behind the given handle and computes
// its Keccak-224 digest. When load_at_once is set the whole file is
// read into memory first; otherwise it is consumed as a stream.
hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
	if !load_at_once {
		return hash_stream_224(os.stream_from_handle(hd))
	}
	if buf, ok := os.read_entire_file(hd); ok {
		return hash_bytes_224(buf[:]), true
	}
	return [DIGEST_SIZE_224]byte{}, false
}
// hash_224 is a procedure group so any Keccak-224 entry point above
// can be invoked uniformly as `hash_224(...)`.
hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}
// hash_string_256 computes the Keccak-256 digest of the given
// string and returns it.
hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
	input := transmute([]byte)(data)
	return hash_bytes_256(input)
}
// hash_bytes_256 computes the Keccak-256 digest of the given
// bytes and returns it.
hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}
// hash_string_to_buffer_256 computes the Keccak-256 digest of the
// given string and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_256 bytes.
hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
	input := transmute([]byte)(data)
	hash_bytes_to_buffer_256(input, hash)
}
// hash_bytes_to_buffer_256 computes the Keccak-256 digest of the
// given bytes and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_256 bytes;
// this is now enforced with an explicit assertion.
hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_256, "Destination buffer is smaller than the digest size")
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}
// hash_stream_256 reads the stream in 512-byte chunks and computes
// the Keccak-256 digest of its contents.
// NOTE: read errors are not surfaced — they merely end the chunk
// loop via read <= 0, and `true` is always returned, matching the
// original behavior of this API.
hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
	hash: [DIGEST_SIZE_256]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_256
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	for {
		read, _ := io.read(s, buf)
		if read <= 0 {
			break
		}
		update(&ctx, buf[:read])
	}
	final(&ctx, hash[:])
	return hash, true
}
// hash_file_256 reads the file behind the given handle and computes
// its Keccak-256 digest. When load_at_once is set the whole file is
// read into memory first; otherwise it is consumed as a stream.
hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
	if !load_at_once {
		return hash_stream_256(os.stream_from_handle(hd))
	}
	if buf, ok := os.read_entire_file(hd); ok {
		return hash_bytes_256(buf[:]), true
	}
	return [DIGEST_SIZE_256]byte{}, false
}
// hash_256 is a procedure group so any Keccak-256 entry point above
// can be invoked uniformly as `hash_256(...)`.
hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}
// hash_string_384 computes the Keccak-384 digest of the given
// string and returns it.
hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
	input := transmute([]byte)(data)
	return hash_bytes_384(input)
}
// hash_bytes_384 computes the Keccak-384 digest of the given
// bytes and returns it.
hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}
// hash_string_to_buffer_384 computes the Keccak-384 digest of the
// given string and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_384 bytes.
hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
	input := transmute([]byte)(data)
	hash_bytes_to_buffer_384(input, hash)
}
// hash_bytes_to_buffer_384 computes the Keccak-384 digest of the
// given bytes and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_384 bytes;
// this is now enforced with an explicit assertion.
hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_384, "Destination buffer is smaller than the digest size")
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}
// hash_stream_384 reads the stream in 512-byte chunks and computes
// the Keccak-384 digest of its contents.
// NOTE: read errors are not surfaced — they merely end the chunk
// loop via read <= 0, and `true` is always returned, matching the
// original behavior of this API.
hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
	hash: [DIGEST_SIZE_384]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_384
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	for {
		read, _ := io.read(s, buf)
		if read <= 0 {
			break
		}
		update(&ctx, buf[:read])
	}
	final(&ctx, hash[:])
	return hash, true
}
// hash_file_384 reads the file behind the given handle and computes
// its Keccak-384 digest. When load_at_once is set the whole file is
// read into memory first; otherwise it is consumed as a stream.
hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
	if !load_at_once {
		return hash_stream_384(os.stream_from_handle(hd))
	}
	if buf, ok := os.read_entire_file(hd); ok {
		return hash_bytes_384(buf[:]), true
	}
	return [DIGEST_SIZE_384]byte{}, false
}
// hash_384 is a procedure group so any Keccak-384 entry point above
// can be invoked uniformly as `hash_384(...)`.
hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}
// hash_string_512 computes the Keccak-512 digest of the given
// string and returns it.
hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
	input := transmute([]byte)(data)
	return hash_bytes_512(input)
}
// hash_bytes_512 computes the Keccak-512 digest of the given
// bytes and returns it.
hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash[:])
	return hash
}
// hash_string_to_buffer_512 computes the Keccak-512 digest of the
// given string and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_512 bytes.
hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
	input := transmute([]byte)(data)
	hash_bytes_to_buffer_512(input, hash)
}
// hash_bytes_to_buffer_512 computes the Keccak-512 digest of the
// given bytes and writes it into the second parameter.
// The destination buffer must be at least DIGEST_SIZE_512 bytes;
// this is now enforced with an explicit assertion.
hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
	assert(len(hash) >= DIGEST_SIZE_512, "Destination buffer is smaller than the digest size")
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	update(&ctx, data)
	final(&ctx, hash)
}
// hash_stream_512 reads the stream in 512-byte chunks and computes
// the Keccak-512 digest of its contents.
// NOTE: read errors are not surfaced — they merely end the chunk
// loop via read <= 0, and `true` is always returned, matching the
// original behavior of this API.
hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
	hash: [DIGEST_SIZE_512]byte
	ctx: Context
	ctx.mdlen = DIGEST_SIZE_512
	// is_keccak is set by init; setting it here as well was redundant.
	init(&ctx)
	buf := make([]byte, 512)
	defer delete(buf)
	for {
		read, _ := io.read(s, buf)
		if read <= 0 {
			break
		}
		update(&ctx, buf[:read])
	}
	final(&ctx, hash[:])
	return hash, true
}
// hash_file_512 reads the file behind the given handle and computes
// its Keccak-512 digest. When load_at_once is set the whole file is
// read into memory first; otherwise it is consumed as a stream.
hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
	if !load_at_once {
		return hash_stream_512(os.stream_from_handle(hd))
	}
	if buf, ok := os.read_entire_file(hd); ok {
		return hash_bytes_512(buf[:]), true
	}
	return [DIGEST_SIZE_512]byte{}, false
}
// hash_512 is a procedure group so any Keccak-512 entry point above
// can be invoked uniformly as `hash_512(...)`.
hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}
- /*
- Low level API
- */
- Context :: _sha3.Sha3_Context
// init initializes the context for Keccak: it forces is_keccak so
// the shared _sha3 implementation applies Keccak's original padding
// instead of the NIST SHA3 padding, then delegates to _sha3.init.
init :: proc(ctx: ^Context) {
	ctx.is_keccak = true
	_sha3.init(ctx)
}
// update absorbs more input into the running hash computation.
update :: proc(ctx: ^Context, data: []byte) {
	_sha3.update(ctx, data)
}
// final finalizes the computation and writes the digest into hash.
// The destination must hold at least ctx.mdlen bytes.
final :: proc(ctx: ^Context, hash: []byte) {
	_sha3.final(ctx, hash)
}
|