keccak.odin 9.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377
  1. package keccak
  2. /*
  3. Copyright 2021 zhibog
  4. Made available under the BSD-3 license.
  5. List of contributors:
  6. zhibog, dotbmp: Initial implementation.
  7. Interface for the Keccak hashing algorithm.
  8. This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
  9. */
  10. import "core:io"
  11. import "core:os"
  12. import "../../_sha3"
  13. /*
  14. High level API
  15. */
// Digest sizes in bytes for each supported Keccak variant.
DIGEST_SIZE_224 :: 28 // Keccak-224
DIGEST_SIZE_256 :: 32 // Keccak-256
DIGEST_SIZE_384 :: 48 // Keccak-384
DIGEST_SIZE_512 :: 64 // Keccak-512
  20. // hash_string_224 will hash the given input and return the
  21. // computed hash
  22. hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
  23. return hash_bytes_224(transmute([]byte)(data))
  24. }
  25. // hash_bytes_224 will hash the given input and return the
  26. // computed hash
  27. hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
  28. hash: [DIGEST_SIZE_224]byte
  29. ctx: Context
  30. ctx.mdlen = DIGEST_SIZE_224
  31. ctx.is_keccak = true
  32. init(&ctx)
  33. update(&ctx, data)
  34. final(&ctx, hash[:])
  35. return hash
  36. }
  37. // hash_string_to_buffer_224 will hash the given input and assign the
  38. // computed hash to the second parameter.
  39. // It requires that the destination buffer is at least as big as the digest size
  40. hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
  41. hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
  42. }
  43. // hash_bytes_to_buffer_224 will hash the given input and write the
  44. // computed hash into the second parameter.
  45. // It requires that the destination buffer is at least as big as the digest size
  46. hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
  47. ctx: Context
  48. ctx.mdlen = DIGEST_SIZE_224
  49. ctx.is_keccak = true
  50. init(&ctx)
  51. update(&ctx, data)
  52. final(&ctx, hash)
  53. }
  54. // hash_stream_224 will read the stream in chunks and compute a
  55. // hash from its contents
  56. hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
  57. hash: [DIGEST_SIZE_224]byte
  58. ctx: Context
  59. ctx.mdlen = DIGEST_SIZE_224
  60. ctx.is_keccak = true
  61. init(&ctx)
  62. buf := make([]byte, 512)
  63. defer delete(buf)
  64. read := 1
  65. for read > 0 {
  66. read, _ = io.read(s, buf)
  67. if read > 0 {
  68. update(&ctx, buf[:read])
  69. }
  70. }
  71. final(&ctx, hash[:])
  72. return hash, true
  73. }
  74. // hash_file_224 will read the file provided by the given handle
  75. // and compute a hash
  76. hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
  77. if !load_at_once {
  78. return hash_stream_224(os.stream_from_handle(hd))
  79. } else {
  80. if buf, ok := os.read_entire_file(hd); ok {
  81. return hash_bytes_224(buf[:]), ok
  82. }
  83. }
  84. return [DIGEST_SIZE_224]byte{}, false
  85. }
// hash_224 is the procedure group covering every Keccak-224 entry
// point: stream, file handle, byte slice, string, and the
// write-to-buffer variants.
hash_224 :: proc {
	hash_stream_224,
	hash_file_224,
	hash_bytes_224,
	hash_string_224,
	hash_bytes_to_buffer_224,
	hash_string_to_buffer_224,
}
  94. // hash_string_256 will hash the given input and return the
  95. // computed hash
  96. hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
  97. return hash_bytes_256(transmute([]byte)(data))
  98. }
  99. // hash_bytes_256 will hash the given input and return the
  100. // computed hash
  101. hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
  102. hash: [DIGEST_SIZE_256]byte
  103. ctx: Context
  104. ctx.mdlen = DIGEST_SIZE_256
  105. ctx.is_keccak = true
  106. init(&ctx)
  107. update(&ctx, data)
  108. final(&ctx, hash[:])
  109. return hash
  110. }
  111. // hash_string_to_buffer_256 will hash the given input and assign the
  112. // computed hash to the second parameter.
  113. // It requires that the destination buffer is at least as big as the digest size
  114. hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
  115. hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
  116. }
  117. // hash_bytes_to_buffer_256 will hash the given input and write the
  118. // computed hash into the second parameter.
  119. // It requires that the destination buffer is at least as big as the digest size
  120. hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
  121. ctx: Context
  122. ctx.mdlen = DIGEST_SIZE_256
  123. ctx.is_keccak = true
  124. init(&ctx)
  125. update(&ctx, data)
  126. final(&ctx, hash)
  127. }
  128. // hash_stream_256 will read the stream in chunks and compute a
  129. // hash from its contents
  130. hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
  131. hash: [DIGEST_SIZE_256]byte
  132. ctx: Context
  133. ctx.mdlen = DIGEST_SIZE_256
  134. ctx.is_keccak = true
  135. init(&ctx)
  136. buf := make([]byte, 512)
  137. defer delete(buf)
  138. read := 1
  139. for read > 0 {
  140. read, _ = io.read(s, buf)
  141. if read > 0 {
  142. update(&ctx, buf[:read])
  143. }
  144. }
  145. final(&ctx, hash[:])
  146. return hash, true
  147. }
  148. // hash_file_256 will read the file provided by the given handle
  149. // and compute a hash
  150. hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
  151. if !load_at_once {
  152. return hash_stream_256(os.stream_from_handle(hd))
  153. } else {
  154. if buf, ok := os.read_entire_file(hd); ok {
  155. return hash_bytes_256(buf[:]), ok
  156. }
  157. }
  158. return [DIGEST_SIZE_256]byte{}, false
  159. }
// hash_256 is the procedure group covering every Keccak-256 entry
// point: stream, file handle, byte slice, string, and the
// write-to-buffer variants.
hash_256 :: proc {
	hash_stream_256,
	hash_file_256,
	hash_bytes_256,
	hash_string_256,
	hash_bytes_to_buffer_256,
	hash_string_to_buffer_256,
}
  168. // hash_string_384 will hash the given input and return the
  169. // computed hash
  170. hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
  171. return hash_bytes_384(transmute([]byte)(data))
  172. }
  173. // hash_bytes_384 will hash the given input and return the
  174. // computed hash
  175. hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
  176. hash: [DIGEST_SIZE_384]byte
  177. ctx: Context
  178. ctx.mdlen = DIGEST_SIZE_384
  179. ctx.is_keccak = true
  180. init(&ctx)
  181. update(&ctx, data)
  182. final(&ctx, hash[:])
  183. return hash
  184. }
  185. // hash_string_to_buffer_384 will hash the given input and assign the
  186. // computed hash to the second parameter.
  187. // It requires that the destination buffer is at least as big as the digest size
  188. hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
  189. hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
  190. }
  191. // hash_bytes_to_buffer_384 will hash the given input and write the
  192. // computed hash into the second parameter.
  193. // It requires that the destination buffer is at least as big as the digest size
  194. hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
  195. ctx: Context
  196. ctx.mdlen = DIGEST_SIZE_384
  197. ctx.is_keccak = true
  198. init(&ctx)
  199. update(&ctx, data)
  200. final(&ctx, hash)
  201. }
  202. // hash_stream_384 will read the stream in chunks and compute a
  203. // hash from its contents
  204. hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
  205. hash: [DIGEST_SIZE_384]byte
  206. ctx: Context
  207. ctx.mdlen = DIGEST_SIZE_384
  208. ctx.is_keccak = true
  209. init(&ctx)
  210. buf := make([]byte, 512)
  211. defer delete(buf)
  212. read := 1
  213. for read > 0 {
  214. read, _ = io.read(s, buf)
  215. if read > 0 {
  216. update(&ctx, buf[:read])
  217. }
  218. }
  219. final(&ctx, hash[:])
  220. return hash, true
  221. }
  222. // hash_file_384 will read the file provided by the given handle
  223. // and compute a hash
  224. hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
  225. if !load_at_once {
  226. return hash_stream_384(os.stream_from_handle(hd))
  227. } else {
  228. if buf, ok := os.read_entire_file(hd); ok {
  229. return hash_bytes_384(buf[:]), ok
  230. }
  231. }
  232. return [DIGEST_SIZE_384]byte{}, false
  233. }
// hash_384 is the procedure group covering every Keccak-384 entry
// point: stream, file handle, byte slice, string, and the
// write-to-buffer variants.
hash_384 :: proc {
	hash_stream_384,
	hash_file_384,
	hash_bytes_384,
	hash_string_384,
	hash_bytes_to_buffer_384,
	hash_string_to_buffer_384,
}
  242. // hash_string_512 will hash the given input and return the
  243. // computed hash
  244. hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
  245. return hash_bytes_512(transmute([]byte)(data))
  246. }
  247. // hash_bytes_512 will hash the given input and return the
  248. // computed hash
  249. hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
  250. hash: [DIGEST_SIZE_512]byte
  251. ctx: Context
  252. ctx.mdlen = DIGEST_SIZE_512
  253. ctx.is_keccak = true
  254. init(&ctx)
  255. update(&ctx, data)
  256. final(&ctx, hash[:])
  257. return hash
  258. }
  259. // hash_string_to_buffer_512 will hash the given input and assign the
  260. // computed hash to the second parameter.
  261. // It requires that the destination buffer is at least as big as the digest size
  262. hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
  263. hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
  264. }
  265. // hash_bytes_to_buffer_512 will hash the given input and write the
  266. // computed hash into the second parameter.
  267. // It requires that the destination buffer is at least as big as the digest size
  268. hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
  269. ctx: Context
  270. ctx.mdlen = DIGEST_SIZE_512
  271. ctx.is_keccak = true
  272. init(&ctx)
  273. update(&ctx, data)
  274. final(&ctx, hash)
  275. }
  276. // hash_stream_512 will read the stream in chunks and compute a
  277. // hash from its contents
  278. hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
  279. hash: [DIGEST_SIZE_512]byte
  280. ctx: Context
  281. ctx.mdlen = DIGEST_SIZE_512
  282. ctx.is_keccak = true
  283. init(&ctx)
  284. buf := make([]byte, 512)
  285. defer delete(buf)
  286. read := 1
  287. for read > 0 {
  288. read, _ = io.read(s, buf)
  289. if read > 0 {
  290. update(&ctx, buf[:read])
  291. }
  292. }
  293. final(&ctx, hash[:])
  294. return hash, true
  295. }
  296. // hash_file_512 will read the file provided by the given handle
  297. // and compute a hash
  298. hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
  299. if !load_at_once {
  300. return hash_stream_512(os.stream_from_handle(hd))
  301. } else {
  302. if buf, ok := os.read_entire_file(hd); ok {
  303. return hash_bytes_512(buf[:]), ok
  304. }
  305. }
  306. return [DIGEST_SIZE_512]byte{}, false
  307. }
// hash_512 is the procedure group covering every Keccak-512 entry
// point: stream, file handle, byte slice, string, and the
// write-to-buffer variants.
hash_512 :: proc {
	hash_stream_512,
	hash_file_512,
	hash_bytes_512,
	hash_string_512,
	hash_bytes_to_buffer_512,
	hash_string_to_buffer_512,
}
  316. /*
  317. Low level API
  318. */
// Context aliases the shared SHA3 sponge state from the internal
// _sha3 package; the is_keccak flag selects the legacy Keccak
// padding rather than the NIST SHA3 padding (see file header).
Context :: _sha3.Sha3_Context
// init prepares the context for hashing. is_keccak is forced on so
// finalization uses the original Keccak padding.
// NOTE(review): callers must still set ctx.mdlen before init, as the
// high-level procs above do — confirm against _sha3.init.
init :: proc(ctx: ^Context) {
	ctx.is_keccak = true
	_sha3.init(ctx)
}
// update absorbs more input bytes into the sponge state.
update :: proc(ctx: ^Context, data: []byte) {
	_sha3.update(ctx, data)
}
// final finalizes the hash and writes the digest into hash.
final :: proc(ctx: ^Context, hash: []byte) {
	_sha3.final(ctx, hash)
}