
Merge branch 'odin-lang:master' into master

marcs feh 1 year ago
commit 9c6574e053
77 changed files with 4855 additions and 3167 deletions
  1. .github/workflows/ci.yml (+45 -4)
  2. .github/workflows/nightly.yml (+42 -2)
  3. .gitignore (+2 -1)
  4. base/runtime/core.odin (+8 -0)
  5. base/runtime/core_builtin.odin (+14 -0)
  6. base/runtime/core_builtin_soa.odin (+0 -1)
  7. base/runtime/dynamic_map_internal.odin (+51 -0)
  8. base/runtime/os_specific_darwin.odin (+6 -17)
  9. core/crypto/README.md (+8 -70)
  10. core/crypto/_blake2/blake2.odin (+58 -39)
  11. core/crypto/_sha3/sha3.odin (+68 -43)
  12. core/crypto/blake2b/blake2b.odin (+32 -100)
  13. core/crypto/blake2s/blake2s.odin (+32 -100)
  14. core/crypto/hash/doc.odin (+62 -0)
  15. core/crypto/hash/hash.odin (+116 -0)
  16. core/crypto/hash/low_level.odin (+353 -0)
  17. core/crypto/hmac/hmac.odin (+162 -0)
  18. core/crypto/legacy/keccak/keccak.odin (+54 -336)
  19. core/crypto/legacy/md5/md5.odin (+50 -98)
  20. core/crypto/legacy/sha1/sha1.odin (+52 -99)
  21. core/crypto/poly1305/poly1305.odin (+0 -4)
  22. core/crypto/sha2/sha2.odin (+88 -425)
  23. core/crypto/sha3/sha3.odin (+56 -324)
  24. core/crypto/shake/shake.odin (+40 -178)
  25. core/crypto/sm3/sm3.odin (+47 -98)
  26. core/dynlib/lib.odin (+18 -25)
  27. core/math/linalg/general.odin (+25 -0)
  28. core/math/linalg/specific.odin (+37 -0)
  29. core/math/rand/rand.odin (+20 -0)
  30. core/mem/virtual/arena.odin (+2 -2)
  31. core/os/os_darwin.odin (+46 -11)
  32. core/os/os_freebsd.odin (+13 -2)
  33. core/os/os_linux.odin (+22 -4)
  34. core/os/os_openbsd.odin (+13 -2)
  35. core/os/stream.odin (+13 -0)
  36. core/path/filepath/path.odin (+11 -16)
  37. core/prof/spall/doc.odin (+29 -17)
  38. core/prof/spall/spall.odin (+25 -10)
  39. core/prof/spall/spall_linux.odin (+36 -0)
  40. core/prof/spall/spall_unix.odin (+57 -0)
  41. core/prof/spall/spall_windows.odin (+54 -0)
  42. core/relative/relative.odin (+171 -0)
  43. core/sys/linux/helpers.odin (+1 -0)
  44. core/sys/linux/sys.odin (+6 -1)
  45. core/sys/windows/kernel32.odin (+1 -0)
  46. core/sys/windows/user32.odin (+1 -0)
  47. examples/all/all_main.odin (+4 -0)
  48. src/build_settings.cpp (+19 -13)
  49. src/check_builtin.cpp (+136 -53)
  50. src/check_expr.cpp (+22 -17)
  51. src/check_type.cpp (+25 -6)
  52. src/checker.cpp (+26 -8)
  53. src/checker.hpp (+18 -0)
  54. src/linker.cpp (+14 -18)
  55. src/llvm_backend.cpp (+1 -1)
  56. src/llvm_backend.hpp (+2 -0)
  57. src/llvm_backend_expr.cpp (+1 -1)
  58. src/llvm_backend_opt.cpp (+15 -4)
  59. src/llvm_backend_proc.cpp (+52 -15)
  60. src/main.cpp (+25 -11)
  61. src/parser.cpp (+63 -3)
  62. src/string.cpp (+12 -0)
  63. src/types.cpp (+21 -2)
  64. tests/core/Makefile (+1 -1)
  65. tests/core/build.bat (+2 -2)
  66. tests/core/crypto/test_core_crypto.odin (+415 -425)
  67. tests/core/crypto/test_core_crypto_hash.odin (+616 -0)
  68. tests/core/crypto/test_core_crypto_mac.odin (+241 -0)
  69. tests/core/crypto/test_core_crypto_modern.odin (+0 -541)
  70. tests/core/crypto/test_crypto_benchmark.odin (+236 -0)
  71. tests/core/encoding/hxa/test_core_hxa.odin (+1 -2)
  72. tests/internal/test_pow.odin (+10 -2)
  73. vendor/OpenGL/helpers.odin (+0 -2)
  74. vendor/raylib/raylib.odin (+3 -3)
  75. vendor/raylib/raymath.odin (+818 -0)
  76. vendor/wasm/WebGL/webgl.odin (+2 -2)
  77. vendor/wasm/WebGL/webgl2.odin (+7 -6)

+ 45 - 4
.github/workflows/ci.yml

@@ -3,6 +3,7 @@ on: [push, pull_request, workflow_dispatch]

jobs:
  build_linux:
+    name: Ubuntu Build, Check, and Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
@@ -46,6 +47,9 @@ jobs:
      - name: Odin check examples/all for Linux i386
        run: ./odin check examples/all -vet -strict-style -target:linux_i386
        timeout-minutes: 10
+      - name: Odin check examples/all for Linux arm64
+        run: ./odin check examples/all -vet -strict-style -target:linux_arm64
+        timeout-minutes: 10
      - name: Odin check examples/all for FreeBSD amd64
        run: ./odin check examples/all -vet -strict-style -target:freebsd_amd64
        timeout-minutes: 10
@@ -53,6 +57,7 @@ jobs:
        run: ./odin check examples/all -vet -strict-style -target:openbsd_amd64
        timeout-minutes: 10
  build_macOS:
+    name: MacOS Build, Check, and Test
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v1
@@ -92,13 +97,49 @@ jobs:
          cd tests/internal
          make
        timeout-minutes: 10
-      - name: Odin check examples/all for Darwin arm64
-        run: ./odin check examples/all -vet -strict-style -target:darwin_arm64
+  build_macOS_arm:
+    name: MacOS ARM Build, Check, and Test
+    runs-on: macos-14 # This is an arm/m1 runner.
+    steps:
+      - uses: actions/checkout@v1
+      - name: Download LLVM, botan and setup PATH
+        run: |
+          brew install llvm@13 botan
+          echo "/opt/homebrew/opt/llvm@13/bin" >> $GITHUB_PATH
+          TMP_PATH=$(xcrun --show-sdk-path)/usr/include
+          echo "CPATH=$TMP_PATH" >> $GITHUB_ENV
+      - name: build odin
+        run: ./build_odin.sh release
+      - name: Odin version
+        run: ./odin version
+        timeout-minutes: 1
+      - name: Odin report
+        run: ./odin report
+        timeout-minutes: 1
+      - name: Odin check
+        run: ./odin check examples/demo -vet
+        timeout-minutes: 10
+      - name: Odin run
+        run: ./odin run examples/demo
        timeout-minutes: 10
-      - name: Odin check examples/all for Linux arm64
-        run: ./odin check examples/all -vet -strict-style -target:linux_arm64
+      - name: Odin run -debug
+        run: ./odin run examples/demo -debug
+        timeout-minutes: 10
+      - name: Odin check examples/all
+        run: ./odin check examples/all -strict-style
+        timeout-minutes: 10
+      - name: Core library tests
+        run: |
+          cd tests/core
+          make
+        timeout-minutes: 10
+      - name: Odin internals tests
+        run: |
+          cd tests/internal
+          make
        timeout-minutes: 10
  build_windows:
+    name: Windows Build, Check, and Test
    runs-on: windows-2022
    steps:
      - uses: actions/checkout@v1

+ 42 - 2
.github/workflows/nightly.yml

@@ -7,6 +7,7 @@ on:

jobs:
  build_windows:
+    name: Windows Build
    if: github.repository == 'odin-lang/Odin'
    runs-on: windows-2022
    steps:
@@ -40,6 +41,7 @@ jobs:
          name: windows_artifacts
          path: dist
  build_ubuntu:
+    name: Ubuntu Build
    if: github.repository == 'odin-lang/Odin'
    runs-on: ubuntu-latest
    steps:
@@ -67,8 +69,9 @@ jobs:
          name: ubuntu_artifacts
          path: dist
  build_macos:
+    name: MacOS Build
    if: github.repository == 'odin-lang/Odin'
-    runs-on: macOS-latest
+    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v1
      - name: Download LLVM and setup PATH
@@ -96,9 +99,40 @@ jobs:
        with:
          name: macos_artifacts
          path: dist
+  build_macos_arm:
+    name: MacOS ARM Build
+    if: github.repository == 'odin-lang/Odin'
+    runs-on: macos-14
+    steps:
+      - uses: actions/checkout@v1
+      - name: Download LLVM and setup PATH
+        run: |
+          brew install llvm@13
+          echo "/opt/homebrew/opt/llvm@13/bin" >> $GITHUB_PATH
+          TMP_PATH=$(xcrun --show-sdk-path)/usr/include
+          echo "CPATH=$TMP_PATH" >> $GITHUB_ENV
+      - name: build odin
+        run: make nightly
+      - name: Odin run
+        run: ./odin run examples/demo
+      - name: Copy artifacts
+        run: |
+          mkdir dist
+          cp odin dist
+          cp LICENSE dist
+          cp -r shared dist
+          cp -r base dist
+          cp -r core dist
+          cp -r vendor dist
+          cp -r examples dist
+      - name: Upload artifact
+        uses: actions/upload-artifact@v1
+        with:
+          name: macos_arm_artifacts
+          path: dist
  upload_b2:
    runs-on: [ubuntu-latest]
-    needs: [build_windows, build_macos, build_ubuntu]
+    needs: [build_windows, build_macos, build_macos_arm, build_ubuntu]
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-python@v2
@@ -129,6 +163,11 @@ jobs:
        with:
          name: macos_artifacts

+      - name: Download macOS arm artifacts
+        uses: actions/download-artifact@v1
+        with:
+          name: macos_arm_artifacts
+
      - name: Create archives and upload
        shell: bash
        env:
@@ -145,6 +184,7 @@ jobs:
          ./ci/upload_create_nightly.sh "$BUCKET" windows-amd64 windows_artifacts/
          ./ci/upload_create_nightly.sh "$BUCKET" ubuntu-amd64 ubuntu_artifacts/
          ./ci/upload_create_nightly.sh "$BUCKET" macos-amd64 macos_artifacts/
+          ./ci/upload_create_nightly.sh "$BUCKET" macos-arm64 macos_arm_artifacts/

          echo Deleting old artifacts in B2
          python3 ci/delete_old_binaries.py "$BUCKET" "$DAYS_TO_KEEP"

+ 2 - 1
.gitignore

@@ -39,7 +39,7 @@ tests/core/test_core_net
tests/core/test_core_os_exit
tests/core/test_core_reflect
tests/core/test_core_strings
-tests/core/test_crypto_hash
+tests/core/test_crypto
tests/core/test_hash
tests/core/test_hxa
tests/core/test_json
@@ -49,6 +49,7 @@ tests/core/test_varint
tests/core/test_xml
tests/core/test_core_slice
tests/core/test_core_thread
+tests/core/test_core_runtime
tests/vendor/vendor_botan
# Visual Studio 2015 cache/options directory
.vs/

+ 8 - 0
base/runtime/core.odin

@@ -296,6 +296,14 @@ Source_Code_Location :: struct {
	procedure:    string,
}

+/*
+	Used by the built-in directory `#load_directory(path: string) -> []Load_Directory_File`
+*/
+Load_Directory_File :: struct {
+	name: string,
+	data: []byte, // immutable data
+}
+
Assertion_Failure_Proc :: #type proc(prefix, message: string, loc: Source_Code_Location) -> !

// Allocation Stuff
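
A minimal sketch of consuming the new struct (the `assets` directory is hypothetical, and binding `#load_directory` to a constant the same way `#load` binds is an assumption):

```odin
package example

import "core:fmt"

// Embed every file under "assets" at compile time (hypothetical path;
// assumes #load_directory binds like #load).
ASSETS :: #load_directory("assets")

main :: proc() {
	for file in ASSETS {
		// Each Load_Directory_File carries the file name and its
		// immutable contents.
		fmt.printf("%s: %d bytes\n", file.name, len(file.data))
	}
}
```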

+ 14 - 0
base/runtime/core_builtin.odin

@@ -824,6 +824,20 @@ map_insert :: proc(m: ^$T/map[$K]$V, key: K, value: V, loc := #caller_location)
	return (^V)(__dynamic_map_set_without_hash((^Raw_Map)(m), map_info(T), rawptr(&key), rawptr(&value), loc))
}

+// Explicitly inserts a key and value into a map `m`, the same as `map_insert`, but the return values differ.
+// - `prev_key_ptr` will return the pointer to the previous key if it exists, and `nil` otherwise.
+// - `value_ptr` will return the pointer to the memory where the insertion happens, and `nil` if the map failed to resize.
+// - `found_previous` will be true if `prev_key_ptr != nil`.
+@(require_results)
+map_insert_and_check_for_previous :: proc(m: ^$T/map[$K]$V, key: K, value: V, loc := #caller_location) -> (prev_key_ptr: ^K, value_ptr: ^V, found_previous: bool) {
+	key, value := key, value
+	kp, vp := __dynamic_map_set_extra_without_hash((^Raw_Map)(m), map_info(T), rawptr(&key), rawptr(&value), loc)
+	prev_key_ptr   = (^K)(kp)
+	value_ptr      = (^V)(vp)
+	found_previous = kp != nil
+	return
+}
+

@builtin
card :: proc "contextless" (s: $S/bit_set[$E; $U]) -> int {
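
A hedged usage sketch for the new procedure (it is not marked `@builtin` in this diff, so the sketch assumes it is reached through `base:runtime`):

```odin
package example

import "base:runtime"

main :: proc() {
	m: map[string]int
	defer delete(m)

	// First insertion: no previous key exists.
	prev, vptr, found := runtime.map_insert_and_check_for_previous(&m, "answer", 42)
	assert(!found && prev == nil && vptr^ == 42)

	// Re-inserting the same key reports the surviving key's pointer
	// and the freshly overwritten value.
	prev, vptr, found = runtime.map_insert_and_check_for_previous(&m, "answer", 54)
	assert(found && prev^ == "answer" && vptr^ == 54)
}
```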

+ 0 - 1
base/runtime/core_builtin_soa.odin

@@ -86,7 +86,6 @@ make_soa_aligned :: proc($T: typeid/#soa[]$E, length: int, alignment: int, alloc
		return
	}

-	array.allocator = allocator
	footer := raw_soa_footer(&array)
	if size_of(E) == 0 {
		footer.len = length

+ 51 - 0
base/runtime/dynamic_map_internal.odin

@@ -841,6 +841,33 @@ __dynamic_map_get :: proc "contextless" (#no_alias m: ^Raw_Map, #no_alias info:
	}
}

+__dynamic_map_get_key_and_value :: proc "contextless" (#no_alias m: ^Raw_Map, #no_alias info: ^Map_Info, h: Map_Hash, key: rawptr) -> (key_ptr, value_ptr: rawptr) {
+	if m.len == 0 {
+		return nil, nil
+	}
+	pos := map_desired_position(m^, h)
+	distance := uintptr(0)
+	mask := (uintptr(1) << map_log2_cap(m^)) - 1
+	ks, vs, hs, _, _ := map_kvh_data_dynamic(m^, info)
+	for {
+		element_hash := hs[pos]
+		if map_hash_is_empty(element_hash) {
+			return nil, nil
+		} else if distance > map_probe_distance(m^, element_hash, pos) {
+			return nil, nil
+		} else if element_hash == h {
+			other_key := rawptr(map_cell_index_dynamic(ks, info.ks, pos))
+			if info.key_equal(key, other_key) {
+				key_ptr   = other_key
+				value_ptr = rawptr(map_cell_index_dynamic(vs, info.vs, pos))
+				return
+			}
+		}
+		pos = (pos + 1) & mask
+		distance += 1
+	}
+}
+
// IMPORTANT: USED WITHIN THE COMPILER
__dynamic_map_check_grow :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^Map_Info, loc := #caller_location) -> (err: Allocator_Error, has_grown: bool) {
	if m.len >= map_resize_threshold(m^) {
@@ -874,6 +901,30 @@ __dynamic_map_set :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^Map_In
	m.len += 1
	return rawptr(result)
}
+__dynamic_map_set_extra_without_hash :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^Map_Info, key, value: rawptr, loc := #caller_location) -> (prev_key_ptr, value_ptr: rawptr) {
+	return __dynamic_map_set_extra(m, info, info.key_hasher(key, map_seed(m^)), key, value, loc)
+}
+
+__dynamic_map_set_extra :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^Map_Info, hash: Map_Hash, key, value: rawptr, loc := #caller_location) -> (prev_key_ptr, value_ptr: rawptr) {
+	if prev_key_ptr, value_ptr = __dynamic_map_get_key_and_value(m, info, hash, key); value_ptr != nil {
+		intrinsics.mem_copy_non_overlapping(value_ptr, value, info.vs.size_of_type)
+		return
+	}
+
+	hash := hash
+	err, has_grown := __dynamic_map_check_grow(m, info, loc)
+	if err != nil {
+		return nil, nil
+	}
+	if has_grown {
+		hash = info.key_hasher(key, map_seed(m^))
+	}
+
+	result := map_insert_hash_dynamic(m, info, hash, uintptr(key), uintptr(value))
+	m.len += 1
+	return nil, rawptr(result)
+}
+

// IMPORTANT: USED WITHIN THE COMPILER
@(private)

+ 6 - 17
base/runtime/os_specific_darwin.odin

@@ -2,25 +2,14 @@
//+private
package runtime

-foreign import libc "system:System.framework"
-
-@(default_calling_convention="c")
-foreign libc {
-	@(link_name="__stderrp")
-	_stderr: rawptr
-
-	@(link_name="fwrite")
-	_fwrite :: proc(ptr: rawptr, size: uint, nmemb: uint, stream: rawptr) -> uint ---
-
-	@(link_name="__error")
-	_get_errno :: proc() -> ^i32 ---
-}
+import "base:intrinsics"

_stderr_write :: proc "contextless" (data: []byte) -> (int, _OS_Errno) {
-	ret := _fwrite(raw_data(data), 1, len(data), _stderr)
-	if ret < len(data) {
-		err := _get_errno()
-		return int(ret), _OS_Errno(err^ if err != nil else 0)
+	WRITE  :: 0x20000004
+	STDERR :: 2
+	ret := intrinsics.syscall(WRITE, STDERR, uintptr(raw_data(data)), uintptr(len(data)))
+	if ret < 0 {
+		return 0, _OS_Errno(-ret)
	}
	return int(ret), 0
}

+ 8 - 70
core/crypto/README.md

@@ -1,84 +1,22 @@
# crypto

-A cryptography library for the Odin language
+A cryptography library for the Odin language.

## Supported

-This library offers various algorithms implemented in Odin.
-Please see the chart below for some of the options.
-
-## Hashing algorithms
-
-| Algorithm                                                                                                    |                  |
-|:-------------------------------------------------------------------------------------------------------------|:-----------------|
-| [BLAKE2B](https://datatracker.ietf.org/doc/html/rfc7693)                                                     | &#10004;&#65039; |
-| [BLAKE2S](https://datatracker.ietf.org/doc/html/rfc7693)                                                     | &#10004;&#65039; |
-| [SHA-2](https://csrc.nist.gov/csrc/media/publications/fips/180/2/archive/2002-08-01/documents/fips180-2.pdf) | &#10004;&#65039; |
-| [SHA-3](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | &#10004;&#65039; |
-| [SHAKE](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                            | &#10004;&#65039; |
-| [SM3](https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02)                                           | &#10004;&#65039; |
-| legacy/[Keccak](https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf)                                    | &#10004;&#65039; |
-| legacy/[MD5](https://datatracker.ietf.org/doc/html/rfc1321)                                                  | &#10004;&#65039; |
-| legacy/[SHA-1](https://datatracker.ietf.org/doc/html/rfc3174)                                                | &#10004;&#65039; |
-
-#### High level API
-
-Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_<size>`\*.
-Included in these groups are six procedures.
-- `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
-- `hash_bytes` - Hash a given byte slice and return the computed hash
-- `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
-- `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
-- `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
-- `hash_file` - Takes a file handle and returns the computed hash from it. A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true)
-
-\* On some algorithms there is another part to the name, since they might offer control about additional parameters.
-For instance, `SHA-2` offers different sizes.
-Computing a 512-bit hash is therefore achieved by calling `sha2.hash_512(...)`.
-
-#### Low level API
-
-The above mentioned procedures internally call three procedures: `init`, `update` and `final`.
-You may also directly call them, if you wish.
-
-#### Example
-
-```odin
-package crypto_example
-
-// Import the desired package
-import "core:crypto/blake2b"
-
-main :: proc() {
-    input := "foo"
-
-    // Compute the hash, using the high level API
-    computed_hash := blake2b.hash(input)
-
-    // Variant that takes a destination buffer, instead of returning the computed hash
-    hash := make([]byte, sha2.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash
-    blake2b.hash(input, hash[:])
-
-    // Compute the hash, using the low level API
-    ctx: blake2b.Context
-    computed_hash_low: [blake2b.DIGEST_SIZE]byte
-    blake2b.init(&ctx)
-    blake2b.update(&ctx, transmute([]byte)input)
-    blake2b.final(&ctx, computed_hash_low[:])
-}
-```
-For example uses of all available algorithms, please see the tests within `tests/core/crypto`.
+This package offers various algorithms implemented in Odin, along with
+useful helpers such as access to the system entropy source, and a
+constant-time byte comparison.

## Implementation considerations

- The crypto packages are not thread-safe.
- Best-effort is made to mitigate timing side-channels on reasonable
-  architectures. Architectures that are known to be unreasonable include
+  architectures.  Architectures that are known to be unreasonable include
  but are not limited to i386, i486, and WebAssembly.
-- Some but not all of the packages attempt to sanitize sensitive data,
-  however this is not done consistently through the library at the moment.
-  As Thomas Pornin puts it "In general, such memory cleansing is a fool's
-  quest."
+- The packages attempt to sanitize sensitive data, however this is, and
+  will remain a "best-effort" implementation decision.  As Thomas Pornin
+  puts it "In general, such memory cleansing is a fool's quest."
- None of these packages have received independent third party review.

## License
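
A short sketch of the helpers the rewritten README mentions (`crypto.compare_constant_time` is confirmed by the HMAC code later in this commit; `crypto.rand_bytes` as the entropy entry point is an assumption):

```odin
package example

import "core:crypto"

main :: proc() {
	a := []byte{0xde, 0xad, 0xbe, 0xef}
	b := []byte{0xde, 0xad, 0xbe, 0xef}

	// Constant-time byte comparison: returns 1 iff equal, with no
	// early exit that could leak timing information.
	assert(crypto.compare_constant_time(a, b) == 1)

	// Fill a buffer from the system entropy source (assumed helper).
	entropy: [32]byte
	crypto.rand_bytes(entropy[:])
}
```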

+ 58 - 39
core/crypto/_blake2/blake2.odin

@@ -11,6 +11,7 @@ package _blake2
*/

import "core:encoding/endian"
+import "core:mem"

BLAKE2S_BLOCK_SIZE :: 64
BLAKE2S_SIZE :: 32
@@ -28,7 +29,6 @@ Blake2s_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
-	cfg:          Blake2_Config,

	is_initialized: bool,
}
@@ -44,7 +44,6 @@ Blake2b_Context :: struct {
	is_keyed:     bool,
	size:         byte,
	is_last_node: bool,
-	cfg:          Blake2_Config,

	is_initialized: bool,
}
@@ -83,62 +82,61 @@ BLAKE2B_IV := [8]u64 {
	0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
}

-init :: proc(ctx: ^$T) {
+init :: proc(ctx: ^$T, cfg: ^Blake2_Config) {
	when T == Blake2s_Context {
-		block_size :: BLAKE2S_BLOCK_SIZE
		max_size :: BLAKE2S_SIZE
	} else when T == Blake2b_Context {
-		block_size :: BLAKE2B_BLOCK_SIZE
		max_size :: BLAKE2B_SIZE
	}

-	if ctx.cfg.size > max_size {
+	if cfg.size > max_size {
		panic("blake2: requested output size exceeds algorithm max")
	}

-	p := make([]byte, block_size)
-	defer delete(p)
+	// To save having to allocate a scratch buffer, use the internal
+	// data buffer (`ctx.x`), as it is exactly the correct size.
+	p := ctx.x[:]

-	p[0] = ctx.cfg.size
-	p[1] = byte(len(ctx.cfg.key))
+	p[0] = cfg.size
+	p[1] = byte(len(cfg.key))

-	if ctx.cfg.salt != nil {
+	if cfg.salt != nil {
		when T == Blake2s_Context {
-			copy(p[16:], ctx.cfg.salt)
+			copy(p[16:], cfg.salt)
		} else when T == Blake2b_Context {
-			copy(p[32:], ctx.cfg.salt)
+			copy(p[32:], cfg.salt)
		}
	}
-	if ctx.cfg.person != nil {
+	if cfg.person != nil {
		when T == Blake2s_Context {
-			copy(p[24:], ctx.cfg.person)
+			copy(p[24:], cfg.person)
		} else when T == Blake2b_Context {
-			copy(p[48:], ctx.cfg.person)
+			copy(p[48:], cfg.person)
		}
	}

-	if ctx.cfg.tree != nil {
-		p[2] = ctx.cfg.tree.(Blake2_Tree).fanout
-		p[3] = ctx.cfg.tree.(Blake2_Tree).max_depth
-		endian.unchecked_put_u32le(p[4:], ctx.cfg.tree.(Blake2_Tree).leaf_size)
+	if cfg.tree != nil {
+		p[2] = cfg.tree.(Blake2_Tree).fanout
+		p[3] = cfg.tree.(Blake2_Tree).max_depth
+		endian.unchecked_put_u32le(p[4:], cfg.tree.(Blake2_Tree).leaf_size)
		when T == Blake2s_Context {
-			p[8] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset)
-			p[9] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 8)
-			p[10] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 16)
-			p[11] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 24)
-			p[12] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 32)
-			p[13] = byte(ctx.cfg.tree.(Blake2_Tree).node_offset >> 40)
-			p[14] = ctx.cfg.tree.(Blake2_Tree).node_depth
-			p[15] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
+			p[8] = byte(cfg.tree.(Blake2_Tree).node_offset)
+			p[9] = byte(cfg.tree.(Blake2_Tree).node_offset >> 8)
+			p[10] = byte(cfg.tree.(Blake2_Tree).node_offset >> 16)
+			p[11] = byte(cfg.tree.(Blake2_Tree).node_offset >> 24)
+			p[12] = byte(cfg.tree.(Blake2_Tree).node_offset >> 32)
+			p[13] = byte(cfg.tree.(Blake2_Tree).node_offset >> 40)
+			p[14] = cfg.tree.(Blake2_Tree).node_depth
+			p[15] = cfg.tree.(Blake2_Tree).inner_hash_size
		} else when T == Blake2b_Context {
-			endian.unchecked_put_u64le(p[8:], ctx.cfg.tree.(Blake2_Tree).node_offset)
-			p[16] = ctx.cfg.tree.(Blake2_Tree).node_depth
-			p[17] = ctx.cfg.tree.(Blake2_Tree).inner_hash_size
+			endian.unchecked_put_u64le(p[8:], cfg.tree.(Blake2_Tree).node_offset)
+			p[16] = cfg.tree.(Blake2_Tree).node_depth
+			p[17] = cfg.tree.(Blake2_Tree).inner_hash_size
		}
	} else {
		p[2], p[3] = 1, 1
	}
-	ctx.size = ctx.cfg.size
+	ctx.size = cfg.size
	for i := 0; i < 8; i += 1 {
		when T == Blake2s_Context {
			ctx.h[i] = BLAKE2S_IV[i] ~ endian.unchecked_get_u32le(p[i * 4:])
@@ -147,11 +145,14 @@ init :: proc(ctx: ^$T) {
			ctx.h[i] = BLAKE2B_IV[i] ~ endian.unchecked_get_u64le(p[i * 8:])
		}
	}
-	if ctx.cfg.tree != nil && ctx.cfg.tree.(Blake2_Tree).is_last_node {
+
+	mem.zero(&ctx.x, size_of(ctx.x)) // Done with the scratch space, no barrier.
+
+	if cfg.tree != nil && cfg.tree.(Blake2_Tree).is_last_node {
		ctx.is_last_node = true
	}
-	if len(ctx.cfg.key) > 0 {
-		copy(ctx.padded_key[:], ctx.cfg.key)
+	if len(cfg.key) > 0 {
+		copy(ctx.padded_key[:], cfg.key)
		update(ctx, ctx.padded_key[:])
		ctx.is_keyed = true
	}
@@ -194,22 +195,40 @@ update :: proc(ctx: ^$T, p: []byte) {
	ctx.nx += copy(ctx.x[ctx.nx:], p)
}

-final :: proc(ctx: ^$T, hash: []byte) {
+final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
	assert(ctx.is_initialized)

+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: T
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
	when T == Blake2s_Context {
-		if len(hash) < int(ctx.cfg.size) {
+		if len(hash) < int(ctx.size) {
			panic("crypto/blake2s: invalid destination digest size")
		}
		blake2s_final(ctx, hash)
	} else when T == Blake2b_Context {
-		if len(hash) < int(ctx.cfg.size) {
+		if len(hash) < int(ctx.size) {
			panic("crypto/blake2b: invalid destination digest size")
		}
		blake2b_final(ctx, hash)
	}
+}
+
+clone :: proc(ctx, other: ^$T) {
+	ctx^ = other^
+}
+
+reset :: proc(ctx: ^$T) {
+	if !ctx.is_initialized {
+		return
+	}

-	ctx.is_initialized = false
+	mem.zero_explicit(ctx, size_of(ctx^))
}

@(private)

+ 68 - 43
core/crypto/_sha3/sha3.odin

@@ -12,10 +12,16 @@ package _sha3
*/

import "core:math/bits"
+import "core:mem"

ROUNDS :: 24

-Sha3_Context :: struct {
+RATE_224 :: 1152 / 8
+RATE_256 :: 1088 / 8
+RATE_384 :: 832 / 8
+RATE_512 :: 576 / 8
+
+Context :: struct {
	st:        struct #raw_union {
		b: [200]u8,
		q: [25]u64,
@@ -103,81 +109,100 @@ keccakf :: proc "contextless" (st: ^[25]u64) {
	}
}

-init :: proc(c: ^Sha3_Context) {
+init :: proc(ctx: ^Context) {
	for i := 0; i < 25; i += 1 {
-		c.st.q[i] = 0
+		ctx.st.q[i] = 0
	}
-	c.rsiz = 200 - 2 * c.mdlen
-	c.pt = 0
+	ctx.rsiz = 200 - 2 * ctx.mdlen
+	ctx.pt = 0

-	c.is_initialized = true
-	c.is_finalized = false
+	ctx.is_initialized = true
+	ctx.is_finalized = false
}

-update :: proc(c: ^Sha3_Context, data: []byte) {
-	assert(c.is_initialized)
-	assert(!c.is_finalized)
+update :: proc(ctx: ^Context, data: []byte) {
+	assert(ctx.is_initialized)
+	assert(!ctx.is_finalized)

-	j := c.pt
+	j := ctx.pt
	for i := 0; i < len(data); i += 1 {
-		c.st.b[j] ~= data[i]
+		ctx.st.b[j] ~= data[i]
		j += 1
-		if j >= c.rsiz {
-			keccakf(&c.st.q)
+		if j >= ctx.rsiz {
+			keccakf(&ctx.st.q)
			j = 0
		}
	}
-	c.pt = j
+	ctx.pt = j
}

-final :: proc(c: ^Sha3_Context, hash: []byte) {
-	assert(c.is_initialized)
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	assert(ctx.is_initialized)

-	if len(hash) < c.mdlen {
-		if c.is_keccak {
+	if len(hash) < ctx.mdlen {
+		if ctx.is_keccak {
			panic("crypto/keccak: invalid destination digest size")
		}
		panic("crypto/sha3: invalid destination digest size")
	}
-	if c.is_keccak {
-		c.st.b[c.pt] ~= 0x01
+
+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: Context
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
+	if ctx.is_keccak {
+		ctx.st.b[ctx.pt] ~= 0x01
	} else {
-		c.st.b[c.pt] ~= 0x06
+		ctx.st.b[ctx.pt] ~= 0x06
	}

-	c.st.b[c.rsiz - 1] ~= 0x80
-	keccakf(&c.st.q)
-	for i := 0; i < c.mdlen; i += 1 {
-		hash[i] = c.st.b[i]
+	ctx.st.b[ctx.rsiz - 1] ~= 0x80
+	keccakf(&ctx.st.q)
+	for i := 0; i < ctx.mdlen; i += 1 {
+		hash[i] = ctx.st.b[i]
+	}
+}
+
+clone :: proc(ctx, other: ^Context) {
+	ctx^ = other^
+}
+
+reset :: proc(ctx: ^Context) {
+	if !ctx.is_initialized {
+		return
	}

-	c.is_initialized = false // No more absorb, no more squeeze.
+	mem.zero_explicit(ctx, size_of(ctx^))
}

-shake_xof :: proc(c: ^Sha3_Context) {
-	assert(c.is_initialized)
-	assert(!c.is_finalized)
+shake_xof :: proc(ctx: ^Context) {
+	assert(ctx.is_initialized)
+	assert(!ctx.is_finalized)

-	c.st.b[c.pt] ~= 0x1F
-	c.st.b[c.rsiz - 1] ~= 0x80
-	keccakf(&c.st.q)
-	c.pt = 0
+	ctx.st.b[ctx.pt] ~= 0x1F
+	ctx.st.b[ctx.rsiz - 1] ~= 0x80
+	keccakf(&ctx.st.q)
+	ctx.pt = 0

-	c.is_finalized = true // No more absorb, unlimited squeeze.
+	ctx.is_finalized = true // No more absorb, unlimited squeeze.
}

-shake_out :: proc(c: ^Sha3_Context, hash: []byte) {
-	assert(c.is_initialized)
-	assert(c.is_finalized)
+shake_out :: proc(ctx: ^Context, hash: []byte) {
+	assert(ctx.is_initialized)
+	assert(ctx.is_finalized)

-	j := c.pt
+	j := ctx.pt
	for i := 0; i < len(hash); i += 1 {
-		if j >= c.rsiz {
-			keccakf(&c.st.q)
+		if j >= ctx.rsiz {
+			keccakf(&ctx.st.q)
			j = 0
		}
-		hash[i] = c.st.b[j]
+		hash[i] = ctx.st.b[j]
		j += 1
	}
-	c.pt = j
+	ctx.pt = j
}
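
The absorb/squeeze split that `shake_xof`/`shake_out` implement, as a hedged sketch. It drives the internal package directly (normally the `core:crypto/shake` wrapper does this), and setting `mdlen` to the half security level before `init` is an assumption based on `rsiz = 200 - 2 * mdlen`:

```odin
package example

import "core:crypto/_sha3"

main :: proc() {
	ctx: _sha3.Context
	ctx.mdlen = 128 / 8 // SHAKE-128: rsiz becomes 200 - 2*16 = 168.
	_sha3.init(&ctx)

	msg := "absorb me"
	_sha3.update(&ctx, transmute([]byte)msg) // Absorb phase.

	_sha3.shake_xof(&ctx) // Pad and pivot: no more absorb, unlimited squeeze.

	out: [64]byte
	_sha3.shake_out(&ctx, out[:32]) // Squeeze as much output as needed,
	_sha3.shake_out(&ctx, out[32:]) // across as many calls as needed.
}
```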

+ 32 - 100
core/crypto/blake2b/blake2b.odin

@@ -1,3 +1,10 @@
+/*
+package blake2b implements the BLAKE2b hash algorithm.
+
+See:
+- https://datatracker.ietf.org/doc/html/rfc7693
+- https://www.blake2.net
+*/
package blake2b

/*
@@ -6,122 +13,47 @@ package blake2b

    List of contributors:
        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the BLAKE2b hashing algorithm.
-    BLAKE2b and BLAKE2s share the implementation in the _blake2 package.
*/

-import "core:io"
-import "core:os"
-
import "../_blake2"

-/*
-    High level API
-*/
-
+// DIGEST_SIZE is the BLAKE2b digest size in bytes.
DIGEST_SIZE :: 64

-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-	return hash_bytes(transmute([]byte)(data))
-}
+// BLOCK_SIZE is the BLAKE2b block size in bytes.
+BLOCK_SIZE :: _blake2.BLAKE2B_BLOCK_SIZE

-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	cfg: _blake2.Blake2_Config
-	cfg.size = _blake2.BLAKE2B_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Context
-	cfg: _blake2.Blake2_Config
-	cfg.size = _blake2.BLAKE2B_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
+// Context is a BLAKE2b instance.
+Context :: _blake2.Blake2b_Context

-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
+// init initializes a Context with the default BLAKE2b config.
+init :: proc(ctx: ^Context) {
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2B_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_blake2.init(ctx, &cfg)
}

-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-		return hash_stream(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE]byte{}, false
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	_blake2.update(ctx, data)
}

-hash :: proc {
-	hash_stream,
-	hash_file,
-	hash_bytes,
-	hash_string,
-	hash_bytes_to_buffer,
-	hash_string_to_buffer,
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	_blake2.final(ctx, hash, finalize_clone)
}

-/*
-    Low level API
-*/
-
-Context :: _blake2.Blake2b_Context
-
-init :: proc(ctx: ^Context) {
-	_blake2.init(ctx)
-}
-
-update :: proc(ctx: ^Context, data: []byte) {
-	_blake2.update(ctx, data)
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	_blake2.clone(ctx, other)
}

-final :: proc(ctx: ^Context, hash: []byte) {
-	_blake2.final(ctx, hash)
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	_blake2.reset(ctx)
}

+ 32 - 100
core/crypto/blake2s/blake2s.odin

@@ -1,3 +1,10 @@
+/*
+package blake2s implements the BLAKE2s hash algorithm.
+
+See:
+- https://datatracker.ietf.org/doc/html/rfc7693
+- https://www.blake2.net/
+*/
package blake2s

/*
@@ -6,122 +13,47 @@ package blake2s

    List of contributors:
        zhibog, dotbmp:  Initial implementation.
-
-    Interface for the BLAKE2s hashing algorithm.
-    BLAKE2s and BLAKE2b share the implementation in the _blake2 package.
*/

-import "core:io"
-import "core:os"
-
import "../_blake2"

-/*
-    High level API
-*/
-
+// DIGEST_SIZE is the BLAKE2s digest size in bytes.
DIGEST_SIZE :: 32

-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-	return hash_bytes(transmute([]byte)(data))
-}
+// BLOCK_SIZE is the BLAKE2s block size in bytes.
+BLOCK_SIZE :: _blake2.BLAKE2S_BLOCK_SIZE

-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	cfg: _blake2.Blake2_Config
-	cfg.size = _blake2.BLAKE2S_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Context
-	cfg: _blake2.Blake2_Config
-	cfg.size = _blake2.BLAKE2S_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
+// Context is a BLAKE2s instance.
+Context :: _blake2.Blake2s_Context

-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
+// init initializes a Context with the default BLAKE2s config.
+init :: proc(ctx: ^Context) {
	cfg: _blake2.Blake2_Config
	cfg.size = _blake2.BLAKE2S_SIZE
-	ctx.cfg = cfg
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_blake2.init(ctx, &cfg)
}

-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-		return hash_stream(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE]byte{}, false
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	_blake2.update(ctx, data)
}

-hash :: proc {
-	hash_stream,
-	hash_file,
-	hash_bytes,
-	hash_string,
-	hash_bytes_to_buffer,
-	hash_string_to_buffer,
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	_blake2.final(ctx, hash, finalize_clone)
}

-/*
-    Low level API
-*/
-
-Context :: _blake2.Blake2s_Context
-
-init :: proc(ctx: ^Context) {
-	_blake2.init(ctx)
-}
-
-update :: proc(ctx: ^Context, data: []byte) {
-	_blake2.update(ctx, data)
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	_blake2.clone(ctx, other)
}

-final :: proc(ctx: ^Context, hash: []byte) {
-	_blake2.final(ctx, hash)
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	_blake2.reset(ctx)
}

+ 62 - 0
core/crypto/hash/doc.odin

@@ -0,0 +1,62 @@
+/*
+package hash provides a generic interface to the supported hash algorithms.
+
+A high-level convenience procedure group `hash` is provided to easily
+accomplish common tasks.
+- `hash_string` - Hash a given string and return the digest.
+- `hash_bytes` - Hash a given byte slice and return the digest.
+- `hash_string_to_buffer` - Hash a given string and put the digest in
+  the third parameter.  It requires that the destination buffer
+  is at least as big as the digest size.
+- `hash_bytes_to_buffer` - Hash a given byte slice and put the computed
+  digest in the third parameter.  It requires that the destination
+  buffer is at least as big as the digest size.
+- `hash_stream` - Incrementally fully consume an `io.Stream`, and return
+  the computed digest.
+- `hash_file` - Takes a file handle and returns the computed digest.
+  A third optional boolean parameter controls if the file is streamed
+  (default), or read at once.
+
+```odin
+package hash_example
+
+import "core:crypto/hash"
+
+main :: proc() {
+	input := "Feed the fire."
+
+	// Compute the digest, using the high level API.
+	returned_digest := hash.hash(hash.Algorithm.SHA512_256, input)
+	defer delete(returned_digest)
+
+	// Variant that takes a destination buffer, instead of returning
+	// the digest.
+	digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.BLAKE2B]) // @note: Destination buffer has to be at least as big as the digest size of the hash.
+	defer delete(digest)
+	hash.hash(hash.Algorithm.BLAKE2B, input, digest)
+}
+```
+
+A generic low level API is provided supporting the init/update/final interface
+that is typical with cryptographic hash function implementations.
+
+```odin
+package hash_example
+
+import "core:crypto/hash"
+
+main :: proc() {
+    input := "Let the cinders burn."
+
+    // Compute the digest, using the low level API.
+    ctx: hash.Context
+    digest := make([]byte, hash.DIGEST_SIZES[hash.Algorithm.SHA3_512])
+    defer delete(digest)
+
+    hash.init(&ctx, hash.Algorithm.SHA3_512)
+    hash.update(&ctx, transmute([]byte)input)
+    hash.final(&ctx, digest)
+}
+```
+*/
+package crypto_hash

+ 116 - 0
core/crypto/hash/hash.odin

@@ -0,0 +1,116 @@
+package crypto_hash
+
+/*
+    Copyright 2021 zhibog
+    Made available under the BSD-3 license.
+
+    List of contributors:
+        zhibog, dotbmp:  Initial implementation.
+*/
+
+import "core:io"
+import "core:mem"
+import "core:os"
+
+// hash_string will hash the given input and return the computed digest
+// in a newly allocated slice.
+hash_string :: proc(algorithm: Algorithm, data: string, allocator := context.allocator) -> []byte {
+	return hash_bytes(algorithm, transmute([]byte)(data), allocator)
+}
+
+// hash_bytes will hash the given input and return the computed digest
+// in a newly allocated slice.
+hash_bytes :: proc(algorithm: Algorithm, data: []byte, allocator := context.allocator) -> []byte {
+	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
+	hash_bytes_to_buffer(algorithm, data, dst)
+	return dst
+}
+
+// hash_string_to_buffer will hash the given input and assign the
+// computed digest to the third parameter.  It requires that the
+// destination buffer is at least as big as the digest size.
+hash_string_to_buffer :: proc(algorithm: Algorithm, data: string, hash: []byte) {
+	hash_bytes_to_buffer(algorithm, transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer will hash the given input and write the
+// computed digest into the third parameter.  It requires that the
+// destination buffer is at least as big as the digest size.
+hash_bytes_to_buffer :: proc(algorithm: Algorithm, data, hash: []byte) {
+	ctx: Context
+
+	init(&ctx, algorithm)
+	update(&ctx, data)
+	final(&ctx, hash)
+}
+
+// hash_stream will incrementally fully consume a stream, and return the
+// computed digest in a newly allocated slice.
+hash_stream :: proc(
+	algorithm: Algorithm,
+	s: io.Stream,
+	allocator := context.allocator,
+) -> (
+	[]byte,
+	io.Error,
+) {
+	ctx: Context
+
+	buf: [MAX_BLOCK_SIZE * 4]byte
+	defer mem.zero_explicit(&buf, size_of(buf))
+
+	init(&ctx, algorithm)
+
+	loop: for {
+		n, err := io.read(s, buf[:])
+		if n > 0 {
+			// XXX/yawning: Can io.read return n > 0 and EOF?
+			update(&ctx, buf[:n])
+		}
+		#partial switch err {
+		case .None:
+		case .EOF:
+			break loop
+		case:
+			return nil, err
+		}
+	}
+
+	dst := make([]byte, DIGEST_SIZES[algorithm], allocator)
+	final(&ctx, dst)
+
+	return dst, io.Error.None
+}
+
+// hash_file will read the file provided by the given handle and return the
+// computed digest in a newly allocated slice.
+hash_file :: proc(
+	algorithm: Algorithm,
+	hd: os.Handle,
+	load_at_once := false,
+	allocator := context.allocator,
+) -> (
+	[]byte,
+	io.Error,
+) {
+	if !load_at_once {
+		return hash_stream(algorithm, os.stream_from_handle(hd), allocator)
+	}
+
+	buf, ok := os.read_entire_file(hd, allocator)
+	if !ok {
+		return nil, io.Error.Unknown
+	}
+	defer delete(buf, allocator)
+
+	return hash_bytes(algorithm, buf, allocator), io.Error.None
+}
+
+hash :: proc {
+	hash_stream,
+	hash_file,
+	hash_bytes,
+	hash_string,
+	hash_bytes_to_buffer,
+	hash_string_to_buffer,
+}
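
A short sketch of the file helper ("input.bin" is a hypothetical path):

```odin
package example

import "core:crypto/hash"
import "core:os"

main :: proc() {
	fd, errno := os.open("input.bin") // Hypothetical input file.
	if errno != os.ERROR_NONE {
		return
	}
	defer os.close(fd)

	// Streams the file through the digest by default; pass
	// load_at_once = true to read it whole instead.
	digest, err := hash.hash_file(.SHA256, fd)
	if err != .None {
		return
	}
	defer delete(digest)
}
```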

+ 353 - 0
core/crypto/hash/low_level.odin

@@ -0,0 +1,353 @@
+package crypto_hash
+
+import "core:crypto/blake2b"
+import "core:crypto/blake2s"
+import "core:crypto/sha2"
+import "core:crypto/sha3"
+import "core:crypto/sm3"
+import "core:crypto/legacy/keccak"
+import "core:crypto/legacy/md5"
+import "core:crypto/legacy/sha1"
+
+import "core:reflect"
+
+// MAX_DIGEST_SIZE is the maximum size digest that can be returned by any
+// of the Algorithms supported via this package.
+MAX_DIGEST_SIZE :: 64
+// MAX_BLOCK_SIZE is the maximum block size used by any of the Algorithms
+// supported by this package.
+MAX_BLOCK_SIZE :: sha3.BLOCK_SIZE_224
+
+// Algorithm is the algorithm identifier associated with a given Context.
+Algorithm :: enum {
+	Invalid,
+	BLAKE2B,
+	BLAKE2S,
+	SHA224,
+	SHA256,
+	SHA384,
+	SHA512,
+	SHA512_256,
+	SHA3_224,
+	SHA3_256,
+	SHA3_384,
+	SHA3_512,
+	SM3,
+	Legacy_KECCAK_224,
+	Legacy_KECCAK_256,
+	Legacy_KECCAK_384,
+	Legacy_KECCAK_512,
+	Insecure_MD5,
+	Insecure_SHA1,
+}
+
+// ALGORITHM_NAMES is the Algorithm to algorithm name string.
+ALGORITHM_NAMES := [Algorithm]string {
+	.Invalid           = "Invalid",
+	.BLAKE2B           = "BLAKE2b",
+	.BLAKE2S           = "BLAKE2s",
+	.SHA224            = "SHA-224",
+	.SHA256            = "SHA-256",
+	.SHA384            = "SHA-384",
+	.SHA512            = "SHA-512",
+	.SHA512_256        = "SHA-512/256",
+	.SHA3_224          = "SHA3-224",
+	.SHA3_256          = "SHA3-256",
+	.SHA3_384          = "SHA3-384",
+	.SHA3_512          = "SHA3-512",
+	.SM3               = "SM3",
+	.Legacy_KECCAK_224 = "Keccak-224",
+	.Legacy_KECCAK_256 = "Keccak-256",
+	.Legacy_KECCAK_384 = "Keccak-384",
+	.Legacy_KECCAK_512 = "Keccak-512",
+	.Insecure_MD5      = "MD5",
+	.Insecure_SHA1     = "SHA-1",
+}
+
+// DIGEST_SIZES is the Algorithm to digest size in bytes.
+DIGEST_SIZES := [Algorithm]int {
+	.Invalid           = 0,
+	.BLAKE2B           = blake2b.DIGEST_SIZE,
+	.BLAKE2S           = blake2s.DIGEST_SIZE,
+	.SHA224            = sha2.DIGEST_SIZE_224,
+	.SHA256            = sha2.DIGEST_SIZE_256,
+	.SHA384            = sha2.DIGEST_SIZE_384,
+	.SHA512            = sha2.DIGEST_SIZE_512,
+	.SHA512_256        = sha2.DIGEST_SIZE_512_256,
+	.SHA3_224          = sha3.DIGEST_SIZE_224,
+	.SHA3_256          = sha3.DIGEST_SIZE_256,
+	.SHA3_384          = sha3.DIGEST_SIZE_384,
+	.SHA3_512          = sha3.DIGEST_SIZE_512,
+	.SM3               = sm3.DIGEST_SIZE,
+	.Legacy_KECCAK_224 = keccak.DIGEST_SIZE_224,
+	.Legacy_KECCAK_256 = keccak.DIGEST_SIZE_256,
+	.Legacy_KECCAK_384 = keccak.DIGEST_SIZE_384,
+	.Legacy_KECCAK_512 = keccak.DIGEST_SIZE_512,
+	.Insecure_MD5      = md5.DIGEST_SIZE,
+	.Insecure_SHA1     = sha1.DIGEST_SIZE,
+}
+
+// BLOCK_SIZES is the Algorithm to block size in bytes.
+BLOCK_SIZES := [Algorithm]int {
+	.Invalid           = 0,
+	.BLAKE2B           = blake2b.BLOCK_SIZE,
+	.BLAKE2S           = blake2s.BLOCK_SIZE,
+	.SHA224            = sha2.BLOCK_SIZE_256,
+	.SHA256            = sha2.BLOCK_SIZE_256,
+	.SHA384            = sha2.BLOCK_SIZE_512,
+	.SHA512            = sha2.BLOCK_SIZE_512,
+	.SHA512_256        = sha2.BLOCK_SIZE_512,
+	.SHA3_224          = sha3.BLOCK_SIZE_224,
+	.SHA3_256          = sha3.BLOCK_SIZE_256,
+	.SHA3_384          = sha3.BLOCK_SIZE_384,
+	.SHA3_512          = sha3.BLOCK_SIZE_512,
+	.SM3               = sm3.BLOCK_SIZE,
+	.Legacy_KECCAK_224 = keccak.BLOCK_SIZE_224,
+	.Legacy_KECCAK_256 = keccak.BLOCK_SIZE_256,
+	.Legacy_KECCAK_384 = keccak.BLOCK_SIZE_384,
+	.Legacy_KECCAK_512 = keccak.BLOCK_SIZE_512,
+	.Insecure_MD5      = md5.BLOCK_SIZE,
+	.Insecure_SHA1     = sha1.BLOCK_SIZE,
+}
+
+// Context is a concrete instantiation of a specific hash algorithm.
+Context :: struct {
+	_algo: Algorithm,
+	_impl: union {
+		blake2b.Context,
+		blake2s.Context,
+		sha2.Context_256,
+		sha2.Context_512,
+		sha3.Context,
+		sm3.Context,
+		keccak.Context,
+		md5.Context,
+		sha1.Context,
+	},
+}
+
+@(private)
+_IMPL_IDS := [Algorithm]typeid {
+	.Invalid           = nil,
+	.BLAKE2B           = typeid_of(blake2b.Context),
+	.BLAKE2S           = typeid_of(blake2s.Context),
+	.SHA224            = typeid_of(sha2.Context_256),
+	.SHA256            = typeid_of(sha2.Context_256),
+	.SHA384            = typeid_of(sha2.Context_512),
+	.SHA512            = typeid_of(sha2.Context_512),
+	.SHA512_256        = typeid_of(sha2.Context_512),
+	.SHA3_224          = typeid_of(sha3.Context),
+	.SHA3_256          = typeid_of(sha3.Context),
+	.SHA3_384          = typeid_of(sha3.Context),
+	.SHA3_512          = typeid_of(sha3.Context),
+	.SM3               = typeid_of(sm3.Context),
+	.Legacy_KECCAK_224 = typeid_of(keccak.Context),
+	.Legacy_KECCAK_256 = typeid_of(keccak.Context),
+	.Legacy_KECCAK_384 = typeid_of(keccak.Context),
+	.Legacy_KECCAK_512 = typeid_of(keccak.Context),
+	.Insecure_MD5      = typeid_of(md5.Context),
+	.Insecure_SHA1     = typeid_of(sha1.Context),
+}
+
+// init initializes a Context with a specific hash Algorithm.
+init :: proc(ctx: ^Context, algorithm: Algorithm) {
+	if ctx._impl != nil {
+		reset(ctx)
+	}
+
+	// Directly specialize the union by setting the type ID (save a copy).
+	reflect.set_union_variant_typeid(
+		ctx._impl,
+		_IMPL_IDS[algorithm],
+	)
+	switch algorithm {
+	case .BLAKE2B:
+		blake2b.init(&ctx._impl.(blake2b.Context))
+	case .BLAKE2S:
+		blake2s.init(&ctx._impl.(blake2s.Context))
+	case .SHA224:
+		sha2.init_224(&ctx._impl.(sha2.Context_256))
+	case .SHA256:
+		sha2.init_256(&ctx._impl.(sha2.Context_256))
+	case .SHA384:
+		sha2.init_384(&ctx._impl.(sha2.Context_512))
+	case .SHA512:
+		sha2.init_512(&ctx._impl.(sha2.Context_512))
+	case .SHA512_256:
+		sha2.init_512_256(&ctx._impl.(sha2.Context_512))
+	case .SHA3_224:
+		sha3.init_224(&ctx._impl.(sha3.Context))
+	case .SHA3_256:
+		sha3.init_256(&ctx._impl.(sha3.Context))
+	case .SHA3_384:
+		sha3.init_384(&ctx._impl.(sha3.Context))
+	case .SHA3_512:
+		sha3.init_512(&ctx._impl.(sha3.Context))
+	case .SM3:
+		sm3.init(&ctx._impl.(sm3.Context))
+	case .Legacy_KECCAK_224:
+		keccak.init_224(&ctx._impl.(keccak.Context))
+	case .Legacy_KECCAK_256:
+		keccak.init_256(&ctx._impl.(keccak.Context))
+	case .Legacy_KECCAK_384:
+		keccak.init_384(&ctx._impl.(keccak.Context))
+	case .Legacy_KECCAK_512:
+		keccak.init_512(&ctx._impl.(keccak.Context))
+	case .Insecure_MD5:
+		md5.init(&ctx._impl.(md5.Context))
+	case .Insecure_SHA1:
+		sha1.init(&ctx._impl.(sha1.Context))
+	case .Invalid:
+		panic("crypto/hash: uninitialized algorithm")
+	case:
+		panic("crypto/hash: invalid algorithm")
+	}
+
+	ctx._algo = algorithm
+}
+
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	switch &impl in ctx._impl {
+	case blake2b.Context:
+		blake2b.update(&impl, data)
+	case blake2s.Context:
+		blake2s.update(&impl, data)
+	case sha2.Context_256:
+		sha2.update(&impl, data)
+	case sha2.Context_512:
+		sha2.update(&impl, data)
+	case sha3.Context:
+		sha3.update(&impl, data)
+	case sm3.Context:
+		sm3.update(&impl, data)
+	case keccak.Context:
+		keccak.update(&impl, data)
+	case md5.Context:
+		md5.update(&impl, data)
+	case sha1.Context:
+		sha1.update(&impl, data)
+	case:
+		panic("crypto/hash: uninitialized algorithm")
+	}
+}
+
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	switch &impl in ctx._impl {
+	case blake2b.Context:
+		blake2b.final(&impl, hash, finalize_clone)
+	case blake2s.Context:
+		blake2s.final(&impl, hash, finalize_clone)
+	case sha2.Context_256:
+		sha2.final(&impl, hash, finalize_clone)
+	case sha2.Context_512:
+		sha2.final(&impl, hash, finalize_clone)
+	case sha3.Context:
+		sha3.final(&impl, hash, finalize_clone)
+	case sm3.Context:
+		sm3.final(&impl, hash, finalize_clone)
+	case keccak.Context:
+		keccak.final(&impl, hash, finalize_clone)
+	case md5.Context:
+		md5.final(&impl, hash, finalize_clone)
+	case sha1.Context:
+		sha1.final(&impl, hash, finalize_clone)
+	case:
+		panic("crypto/hash: uninitialized algorithm")
+	}
+
+	if !finalize_clone {
+		reset(ctx)
+	}
+}
+
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	// XXX/yawning: Maybe these cases should panic, because both cases
+	// are probably bugs.
+	if ctx == other {
+		return
+	}
+	if ctx._impl != nil {
+		reset(ctx)
+	}
+
+	ctx._algo = other._algo
+
+	reflect.set_union_variant_typeid(
+		ctx._impl,
+		reflect.union_variant_typeid(other._impl),
+	)
+	switch &src_impl in other._impl {
+	case blake2b.Context:
+		blake2b.clone(&ctx._impl.(blake2b.Context), &src_impl)
+	case blake2s.Context:
+		blake2s.clone(&ctx._impl.(blake2s.Context), &src_impl)
+	case sha2.Context_256:
+		sha2.clone(&ctx._impl.(sha2.Context_256), &src_impl)
+	case sha2.Context_512:
+		sha2.clone(&ctx._impl.(sha2.Context_512), &src_impl)
+	case sha3.Context:
+		sha3.clone(&ctx._impl.(sha3.Context), &src_impl)
+	case sm3.Context:
+		sm3.clone(&ctx._impl.(sm3.Context), &src_impl)
+	case keccak.Context:
+		keccak.clone(&ctx._impl.(keccak.Context), &src_impl)
+	case md5.Context:
+		md5.clone(&ctx._impl.(md5.Context), &src_impl)
+	case sha1.Context:
+		sha1.clone(&ctx._impl.(sha1.Context), &src_impl)
+	case:
+		panic("crypto/hash: uninitialized algorithm")
+	}
+}
+
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	switch &impl in ctx._impl {
+	case blake2b.Context:
+		blake2b.reset(&impl)
+	case blake2s.Context:
+		blake2s.reset(&impl)
+	case sha2.Context_256:
+		sha2.reset(&impl)
+	case sha2.Context_512:
+		sha2.reset(&impl)
+	case sha3.Context:
+		sha3.reset(&impl)
+	case sm3.Context:
+		sm3.reset(&impl)
+	case keccak.Context:
+		keccak.reset(&impl)
+	case md5.Context:
+		md5.reset(&impl)
+	case sha1.Context:
+		sha1.reset(&impl)
+	case:
+	// Unlike clone, calling reset repeatedly is fine.
+	}
+
+	ctx._algo = .Invalid
+	ctx._impl = nil
+}
+
+// algorithm returns the Algorithm used by a Context instance.
+algorithm :: proc(ctx: ^Context) -> Algorithm {
+	return ctx._algo
+}
+
+// digest_size returns the digest size of a Context instance in bytes.
+digest_size :: proc(ctx: ^Context) -> int {
+	return DIGEST_SIZES[ctx._algo]
+}
+
+// block_size returns the block size of a Context instance in bytes.
+block_size :: proc(ctx: ^Context) -> int {
+	return BLOCK_SIZES[ctx._algo]
+}
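
Not part of this commit: a minimal sketch of the streaming interface above, showing the intended call sequence, including a rolling digest via finalize_clone.  The package/main scaffolding, the message, and the choice of .SHA256 are placeholders.

package hash_example

import "core:crypto/hash"
import "core:fmt"

main :: proc() {
	msg := "The quick brown fox jumps over the lazy dog"

	ctx: hash.Context
	hash.init(&ctx, .SHA256)
	hash.update(&ctx, transmute([]byte)msg)

	digest: [hash.MAX_DIGEST_SIZE]byte
	n := hash.digest_size(&ctx)

	// finalize_clone = true finalizes a copy; ctx stays usable.
	hash.final(&ctx, digest[:n], true)
	fmt.printf("rolling: %x\n", digest[:n])

	// The final call without finalize_clone resets ctx.
	hash.update(&ctx, transmute([]byte)msg)
	hash.final(&ctx, digest[:n])
	fmt.printf("total:   %x\n", digest[:n])
}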

+ 162 - 0
core/crypto/hmac/hmac.odin

@@ -0,0 +1,162 @@
+/*
+package hmac implements the HMAC message authentication code algorithm.
+
+See:
+- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.198-1.pdf
+*/
+package hmac
+
+import "core:crypto"
+import "core:crypto/hash"
+import "core:mem"
+
+// sum will compute the HMAC with the specified algorithm and key
+// over msg, and write the computed digest to dst.  It requires that
+// the dst buffer is exactly the tag size.
+sum :: proc(algorithm: hash.Algorithm, dst, msg, key: []byte) {
+	ctx: Context
+
+	init(&ctx, algorithm, key)
+	update(&ctx, msg)
+	final(&ctx, dst)
+}
+
+// verify will verify the HMAC tag computed with the specified algorithm
+// and key over msg and return true iff the tag is valid.  It requires
+// that the tag is correctly sized.
+verify :: proc(algorithm: hash.Algorithm, tag, msg, key: []byte) -> bool {
+	tag_buf: [hash.MAX_DIGEST_SIZE]byte
+
+	derived_tag := tag_buf[:hash.DIGEST_SIZES[algorithm]]
+	sum(algorithm, derived_tag, msg, key)
+
+	return crypto.compare_constant_time(derived_tag, tag) == 1
+}
+
+// Context is a concrete instantiation of HMAC with a specific hash
+// algorithm.
+Context :: struct {
+	_o_hash:         hash.Context, // H(k ^ ipad) (not finalized)
+	_i_hash:         hash.Context, // H(k ^ opad) (not finalized)
+	_tag_sz:         int,
+	_is_initialized: bool,
+}
+
+// init initializes a Context with a specific hash Algorithm and key.
+init :: proc(ctx: ^Context, algorithm: hash.Algorithm, key: []byte) {
+	if ctx._is_initialized {
+		reset(ctx)
+	}
+
+	_init_hashes(ctx, algorithm, key)
+
+	ctx._tag_sz = hash.DIGEST_SIZES[algorithm]
+	ctx._is_initialized = true
+}
+
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	assert(ctx._is_initialized)
+
+	hash.update(&ctx._i_hash, data)
+}
+
+// final finalizes the Context, writes the tag to dst, and calls
+// reset on the Context.
+final :: proc(ctx: ^Context, dst: []byte) {
+	assert(ctx._is_initialized)
+
+	defer (reset(ctx))
+
+	if len(dst) != ctx._tag_sz {
+		panic("crypto/hmac: invalid destination tag size")
+	}
+
+	hash.final(&ctx._i_hash, dst) // H((k ^ ipad) || text)
+
+	hash.update(&ctx._o_hash, dst) // H((k ^ opad) || H((k ^ ipad) || text))
+	hash.final(&ctx._o_hash, dst)
+}
+
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	if !ctx._is_initialized {
+		return
+	}
+
+	hash.reset(&ctx._o_hash)
+	hash.reset(&ctx._i_hash)
+	ctx._tag_sz = 0
+	ctx._is_initialized = false
+}
+
+// algorithm returns the Algorithm used by a Context instance.
+algorithm :: proc(ctx: ^Context) -> hash.Algorithm {
+	assert(ctx._is_initialized)
+
+	return hash.algorithm(&ctx._i_hash)
+}
+
+// tag_size returns the tag size of a Context instance in bytes.
+tag_size :: proc(ctx: ^Context) -> int {
+	assert(ctx._is_initialized)
+
+	return ctx._tag_sz
+}
+
+@(private)
+_I_PAD :: 0x36
+_O_PAD :: 0x5c
+
+@(private)
+_init_hashes :: proc(ctx: ^Context, algorithm: hash.Algorithm, key: []byte) {
+	K0_buf: [hash.MAX_BLOCK_SIZE]byte
+	kPad_buf: [hash.MAX_BLOCK_SIZE]byte
+
+	kLen := len(key)
+	B := hash.BLOCK_SIZES[algorithm]
+	K0 := K0_buf[:B]
+	defer mem.zero_explicit(raw_data(K0), B)
+
+	switch {
+	case kLen == B, kLen < B:
+		// If the length of K = B: set K0 = K.
+		//
+		// If the length of K < B: append zeros to the end of K to
+		// create a B-byte string K0 (e.g., if K is 20 bytes in
+		// length and B = 64, then K will be appended with 44 zero
+		// bytes x’00’).
+		//
+		// K0 is zero-initialized, so the copy handles both cases.
+		copy(K0, key)
+	case kLen > B:
+		// If the length of K > B: hash K to obtain an L byte string,
+		// then append (B-L) zeros to create a B-byte string K0
+		// (i.e., K0 = H(K) || 00...00).
+		tmpCtx := &ctx._o_hash // Saves allocating a hash.Context.
+		hash.init(tmpCtx, algorithm)
+		hash.update(tmpCtx, key)
+		hash.final(tmpCtx, K0)
+	}
+
+	// Initialize the hashes, and write the padded keys:
+	// - ctx._i_hash -> H(K0 ^ ipad)
+	// - ctx._o_hash -> H(K0 ^ opad)
+
+	hash.init(&ctx._o_hash, algorithm)
+	hash.init(&ctx._i_hash, algorithm)
+
+	kPad := kPad_buf[:B]
+	defer mem.zero_explicit(raw_data(kPad), B)
+
+	for v, i in K0 {
+		kPad[i] = v ~ _I_PAD
+	}
+	hash.update(&ctx._i_hash, kPad)
+
+	for v, i in K0 {
+		kPad[i] = v ~ _O_PAD
+	}
+	hash.update(&ctx._o_hash, kPad)
+}
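
Not part of this commit: a sketch of the one-shot interface above.  The key and message are placeholders; the tag buffer is sized via DIGEST_SIZES, mirroring what verify does internally.

package hmac_example

import "core:crypto/hash"
import "core:crypto/hmac"
import "core:fmt"

main :: proc() {
	key_str := "my secret key"
	msg_str := "a message to authenticate"
	key := transmute([]byte)key_str
	msg := transmute([]byte)msg_str

	tag: [hash.MAX_DIGEST_SIZE]byte
	tag_sz := hash.DIGEST_SIZES[hash.Algorithm.SHA256]

	// Compute the tag, then check it in constant time.
	hmac.sum(.SHA256, tag[:tag_sz], msg, key)
	fmt.println(hmac.verify(.SHA256, tag[:tag_sz], msg, key)) // true
}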

+ 54 - 336
core/crypto/legacy/keccak/keccak.odin

@@ -1,3 +1,11 @@
+/*
+package keccak implements the Keccak hash algorithm family.
+
+During the SHA-3 standardization process, the padding scheme was changed,
+thus Keccak and SHA-3 produce different outputs.  Most users should use
+SHA-3 and/or SHAKE instead; however, this legacy algorithm is provided
+for backward compatibility purposes.
+*/
 package keccak
 
 /*
@@ -6,372 +14,82 @@ package keccak
 
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Interface for the Keccak hashing algorithm.
-    This is done because the padding in the SHA3 standard was changed by the NIST, resulting in a different output.
 */
 
-import "core:io"
-import "core:os"
-
 import "../../_sha3"
 import "../../_sha3"
 
 
-/*
-    High level API
-*/
-
+// DIGEST_SIZE_224 is the Keccak-224 digest size.
 DIGEST_SIZE_224 :: 28
+// DIGEST_SIZE_256 is the Keccak-256 digest size.
 DIGEST_SIZE_256 :: 32
+// DIGEST_SIZE_384 is the Keccak-384 digest size.
 DIGEST_SIZE_384 :: 48
+// DIGEST_SIZE_512 is the Keccak-512 digest size.
 DIGEST_SIZE_512 :: 64
 
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-	return hash_bytes_224(transmute([]byte)(data))
-}
+// BLOCK_SIZE_224 is the Keccak-224 block size in bytes.
+BLOCK_SIZE_224 :: _sha3.RATE_224
+// BLOCK_SIZE_256 is the Keccak-256 block size in bytes.
+BLOCK_SIZE_256 :: _sha3.RATE_256
+// BLOCK_SIZE_384 is the Keccak-384 block size in bytes.
+BLOCK_SIZE_384 :: _sha3.RATE_384
+// BLOCK_SIZE_512 is the Keccak-512 block size in bytes.
+BLOCK_SIZE_512 :: _sha3.RATE_512
 
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_224
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_224
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
+// Context is a Keccak instance.
+Context :: distinct _sha3.Context
 
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context
+// init_224 initializes a Context for Keccak-224.
+init_224 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_224
-	ctx.is_keccak = true
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_init(ctx)
 }
 
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-	if !load_at_once {
-		return hash_stream_224(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_224(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-	hash_stream_224,
-	hash_file_224,
-	hash_bytes_224,
-	hash_string_224,
-	hash_bytes_to_buffer_224,
-	hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-	return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
+// init_256 initializes a Context for Keccak-256.
+init_256 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_256
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	ctx.is_keccak = true
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-	if !load_at_once {
-		return hash_stream_256(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_256(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_256]byte{}, false
+	_init(ctx)
 }
 
-hash_256 :: proc {
-	hash_stream_256,
-	hash_file_256,
-	hash_bytes_256,
-	hash_string_256,
-	hash_bytes_to_buffer_256,
-	hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-	return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context
+// init_384 initializes a Context for Keccak-384.
+init_384 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_384
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_384
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_384
-	ctx.is_keccak = true
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-	if !load_at_once {
-		return hash_stream_384(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_384(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-	hash_stream_384,
-	hash_file_384,
-	hash_bytes_384,
-	hash_string_384,
-	hash_bytes_to_buffer_384,
-	hash_string_to_buffer_384,
+	_init(ctx)
 }
 
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-	return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context
+// init_512 initializes a Context for Keccak-512.
+init_512 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_512
-	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
+	_init(ctx)
 }
 
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_512
+@(private)
+_init :: proc(ctx: ^Context) {
 	ctx.is_keccak = true
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
+	_sha3.init(transmute(^_sha3.Context)(ctx))
 }
 
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_512
-	ctx.is_keccak = true
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-	if !load_at_once {
-		return hash_stream_512(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_512(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-	hash_stream_512,
-	hash_file_512,
-	hash_bytes_512,
-	hash_string_512,
-	hash_bytes_to_buffer_512,
-	hash_string_to_buffer_512,
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(transmute(^_sha3.Context)(ctx), data)
 }
 
-/*
-    Low level API
-*/
-
-Context :: _sha3.Sha3_Context
-
-init :: proc(ctx: ^Context) {
-	ctx.is_keccak = true
-	_sha3.init(ctx)
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
 }
 
-update :: proc(ctx: ^Context, data: []byte) {
-	_sha3.update(ctx, data)
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
 }
 
-final :: proc(ctx: ^Context, hash: []byte) {
-	_sha3.final(ctx, hash)
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	_sha3.reset(transmute(^_sha3.Context)(ctx))
 }
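
Not part of this commit: a sketch illustrating the padding difference noted in the package comment, hashing one input with both this package and crypto/sha3 (the input string is a placeholder).

package keccak_example

import "core:crypto/legacy/keccak"
import "core:crypto/sha3"
import "core:fmt"

main :: proc() {
	msg := "padding matters"
	data := transmute([]byte)msg

	k_ctx: keccak.Context
	keccak.init_256(&k_ctx)
	keccak.update(&k_ctx, data)
	k_digest: [keccak.DIGEST_SIZE_256]byte
	keccak.final(&k_ctx, k_digest[:])

	s_ctx: sha3.Context
	sha3.init_256(&s_ctx)
	sha3.update(&s_ctx, data)
	s_digest: [sha3.DIGEST_SIZE_256]byte
	sha3.final(&s_ctx, s_digest[:])

	// Same permutation, different padding: the digests differ.
	fmt.printf("Keccak-256: %x\n", k_digest)
	fmt.printf("SHA3-256:   %x\n", s_digest)
}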

+ 50 - 98
core/crypto/legacy/md5/md5.odin

@@ -1,3 +1,13 @@
+/*
+package md5 implements the MD5 hash algorithm.
+
+WARNING: The MD5 algorithm is known to be insecure and should only be
+used for interoperating with legacy applications.
+
+See:
+- https://eprint.iacr.org/2005/075
+- https://datatracker.ietf.org/doc/html/rfc1321
+*/
 package md5
 
 /*
@@ -6,103 +16,29 @@ package md5
 
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the MD5 hashing algorithm, as defined in RFC 1321 <https://datatracker.ietf.org/doc/html/rfc1321>
 */
 
 import "core:encoding/endian"
 import "core:encoding/endian"
-import "core:io"
 import "core:math/bits"
 import "core:math/bits"
 import "core:mem"
 import "core:mem"
-import "core:os"
-
-/*
-    High level API
-*/
 
+// DIGEST_SIZE is the MD5 digest size in bytes.
 DIGEST_SIZE :: 16
 
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-	return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
+// BLOCK_SIZE is the MD5 block size in bytes.
+BLOCK_SIZE :: 64
 
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-		return hash_stream(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE]byte{}, false
-}
+// Context is an MD5 instance.
+Context :: struct {
+	data:    [BLOCK_SIZE]byte,
+	state:   [4]u32,
+	bitlen:  u64,
+	datalen: u32,
 
-hash :: proc {
-	hash_stream,
-	hash_file,
-	hash_bytes,
-	hash_string,
-	hash_bytes_to_buffer,
-	hash_string_to_buffer,
+	is_initialized: bool,
 }
 
-/*
-    Low level API
-*/
-
+// init initializes a Context.
 init :: proc(ctx: ^Context) {
 	ctx.state[0] = 0x67452301
 	ctx.state[1] = 0xefcdab89
@@ -115,6 +51,7 @@ init :: proc(ctx: ^Context) {
 	ctx.is_initialized = true
 }
 
+// update adds more data to the Context.
 update :: proc(ctx: ^Context, data: []byte) {
 	assert(ctx.is_initialized)
 
@@ -129,13 +66,26 @@ update :: proc(ctx: ^Context, data: []byte) {
 	}
 }
 
-final :: proc(ctx: ^Context, hash: []byte) {
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
 	assert(ctx.is_initialized)
 
 	if len(hash) < DIGEST_SIZE {
 		panic("crypto/md5: invalid destination digest size")
 	}
 
+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: Context
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
 	i := ctx.datalen
 
 	if ctx.datalen < 56 {
@@ -163,25 +113,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
 	for i = 0; i < DIGEST_SIZE / 4; i += 1 {
 		endian.unchecked_put_u32le(hash[i * 4:], ctx.state[i])
 	}
+}
 
-	ctx.is_initialized = false
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^$T) {
+	ctx^ = other^
+}
+
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^$T) {
+	if !ctx.is_initialized {
+		return
+	}
+
+	mem.zero_explicit(ctx, size_of(ctx^))
 }
 
 /*
     MD5 implementation
 */
 
-BLOCK_SIZE :: 64
-
-Context :: struct {
-	data:    [BLOCK_SIZE]byte,
-	state:   [4]u32,
-	bitlen:  u64,
-	datalen: u32,
-
-	is_initialized: bool,
-}
-
 /*
     @note(zh): F, G, H and I, as mentioned in the RFC, have been inlined into FF, GG, HH
     and II respectively, instead of declaring them separately.
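
Not part of this commit: with the high level helpers removed, callers stream data through the low level interface directly.  A sketch (the input is a placeholder):

package md5_example

import "core:crypto/legacy/md5"
import "core:fmt"

main :: proc() {
	msg := "hello world"

	ctx: md5.Context
	md5.init(&ctx)
	md5.update(&ctx, transmute([]byte)msg)

	digest: [md5.DIGEST_SIZE]byte
	md5.final(&ctx, digest[:]) // final also sanitizes ctx via reset
	fmt.printf("%x\n", digest)
}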

+ 52 - 99
core/crypto/legacy/sha1/sha1.odin

@@ -1,3 +1,14 @@
+/*
+package sha1 implements the SHA1 hash algorithm.
+
+WARNING: The SHA1 algorithm is known to be insecure and should only be
+used for interoperating with legacy applications.
+
+See:
+- https://eprint.iacr.org/2017/190
+- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
+- https://datatracker.ietf.org/doc/html/rfc3174
+*/
 package sha1
 
 /*
@@ -6,103 +17,30 @@ package sha1
 
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the SHA1 hashing algorithm, as defined in RFC 3174 <https://datatracker.ietf.org/doc/html/rfc3174>
 */
 
 import "core:encoding/endian"
 import "core:encoding/endian"
-import "core:io"
 import "core:math/bits"
 import "core:math/bits"
 import "core:mem"
 import "core:mem"
-import "core:os"
-
-/*
-    High level API
-*/
 
+// DIGEST_SIZE is the SHA1 digest size in bytes.
 DIGEST_SIZE :: 20
 
-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-	return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
+// BLOCK_SIZE is the SHA1 block size in bytes.
+BLOCK_SIZE :: 64
 
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-		return hash_stream(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE]byte{}, false
-}
+// Context is a SHA1 instance.
+Context :: struct {
+	data:    [BLOCK_SIZE]byte,
+	state:   [5]u32,
+	k:       [4]u32,
+	bitlen:  u64,
+	datalen: u32,
 
-hash :: proc {
-	hash_stream,
-	hash_file,
-	hash_bytes,
-	hash_string,
-	hash_bytes_to_buffer,
-	hash_string_to_buffer,
+	is_initialized: bool,
 }
 
-/*
-    Low level API
-*/
-
+// init initializes a Context.
 init :: proc(ctx: ^Context) {
 	ctx.state[0] = 0x67452301
 	ctx.state[1] = 0xefcdab89
@@ -120,6 +58,7 @@ init :: proc(ctx: ^Context) {
 	ctx.is_initialized = true
 }
 
+// update adds more data to the Context.
 update :: proc(ctx: ^Context, data: []byte) {
 	assert(ctx.is_initialized)
 
@@ -134,13 +73,26 @@ update :: proc(ctx: ^Context, data: []byte) {
 	}
 }
 
-final :: proc(ctx: ^Context, hash: []byte) {
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
 	assert(ctx.is_initialized)
 
 	if len(hash) < DIGEST_SIZE {
 		panic("crypto/sha1: invalid destination digest size")
 	}
 
+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: Context
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
 	i := ctx.datalen
 
 	if ctx.datalen < 56 {
@@ -168,26 +120,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
 	for i = 0; i < DIGEST_SIZE / 4; i += 1 {
 		endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
 	}
+}
+
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^$T) {
+	ctx^ = other^
+}
+
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^$T) {
+	if !ctx.is_initialized {
+		return
+	}
 
-	ctx.is_initialized = false
+	mem.zero_explicit(ctx, size_of(ctx^))
 }
 
 /*
     SHA1 implementation
 */
 
-BLOCK_SIZE :: 64
-
-Context :: struct {
-	data:    [BLOCK_SIZE]byte,
-	datalen: u32,
-	bitlen:  u64,
-	state:   [5]u32,
-	k:       [4]u32,
-
-	is_initialized: bool,
-}
-
 @(private)
 transform :: proc "contextless" (ctx: ^Context, data: []byte) {
 	a, b, c, d, e, i, t: u32
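
Not part of this commit: a sketch of the finalize_clone parameter added above, computing a rolling digest over a growing input (the inputs are placeholders):

package sha1_example

import "core:crypto/legacy/sha1"
import "core:fmt"

main :: proc() {
	part1 := "part 1"
	part2 := ", part 2"

	ctx: sha1.Context
	sha1.init(&ctx)

	digest: [sha1.DIGEST_SIZE]byte
	sha1.update(&ctx, transmute([]byte)part1)
	sha1.final(&ctx, digest[:], true) // finalizes a copy; ctx stays live
	fmt.printf("digest(part1):       %x\n", digest)

	sha1.update(&ctx, transmute([]byte)part2)
	sha1.final(&ctx, digest[:]) // finalizes and resets ctx
	fmt.printf("digest(part1+part2): %x\n", digest)
}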

+ 0 - 4
core/crypto/poly1305/poly1305.odin

@@ -23,10 +23,6 @@ verify :: proc (tag, msg, key: []byte) -> bool {
 	ctx: Context = ---
 	derived_tag: [16]byte = ---
 
-	if len(tag) != TAG_SIZE {
-		panic("crypto/poly1305: invalid tag size")
-	}
-
 	init(&ctx, key)
 	update(&ctx, msg)
 	final(&ctx, derived_tag[:])
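
Not part of this commit: a sketch of the verify flow this hunk touches.  The incrementing key bytes are placeholders; a real Poly1305 key is single-use and must be derived freshly per message.

package poly1305_example

import "core:crypto/poly1305"
import "core:fmt"

main :: proc() {
	// Placeholder one-time key; never reuse a Poly1305 key.
	key: [32]byte
	for i in 0 ..< len(key) {
		key[i] = byte(i)
	}

	msg_str := "a message to authenticate"
	msg := transmute([]byte)msg_str

	tag: [poly1305.TAG_SIZE]byte
	ctx: poly1305.Context
	poly1305.init(&ctx, key[:])
	poly1305.update(&ctx, msg)
	poly1305.final(&ctx, tag[:])

	fmt.println(poly1305.verify(tag[:], msg, key[:])) // true
}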

+ 88 - 425
core/crypto/sha2/sha2.odin

@@ -1,3 +1,10 @@
+/*
+package sha2 implements the SHA2 hash algorithm family.
+
+See:
+- https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf
+- https://datatracker.ietf.org/doc/html/rfc3874
+*/
 package sha2
 
 /*
@@ -6,431 +13,83 @@ package sha2
 
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the SHA2 hashing algorithm, as defined in <https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf>
-    and in RFC 3874 <https://datatracker.ietf.org/doc/html/rfc3874>
 */
 
 import "core:encoding/endian"
 import "core:encoding/endian"
-import "core:io"
 import "core:math/bits"
 import "core:math/bits"
-import "core:os"
-
-/*
-    High level API
-*/
+import "core:mem"
 
 
+// DIGEST_SIZE_224 is the SHA-224 digest size in bytes.
 DIGEST_SIZE_224 :: 28
+// DIGEST_SIZE_256 is the SHA-256 digest size in bytes.
 DIGEST_SIZE_256 :: 32
+// DIGEST_SIZE_384 is the SHA-384 digest size in bytes.
 DIGEST_SIZE_384 :: 48
+// DIGEST_SIZE_512 is the SHA-512 digest size in bytes.
 DIGEST_SIZE_512 :: 64
+// DIGEST_SIZE_512_256 is the SHA-512/256 digest size in bytes.
 DIGEST_SIZE_512_256 :: 32
 
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-	return hash_bytes_224(transmute([]byte)(data))
-}
-
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context_256
-	ctx.md_bits = 224
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-	ctx: Context_256
-	ctx.md_bits = 224
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context_256
-	ctx.md_bits = 224
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
+// BLOCK_SIZE_256 is the SHA-224 and SHA-256 block size in bytes.
+BLOCK_SIZE_256 :: 64
+// BLOCK_SIZE_512 is the SHA-384, SHA-512, and SHA-512/256 block size
+// in bytes.
+BLOCK_SIZE_512 :: 128
 
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-	if !load_at_once {
-		return hash_stream_224(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_224(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-	hash_stream_224,
-	hash_file_224,
-	hash_bytes_224,
-	hash_string_224,
-	hash_bytes_to_buffer_224,
-	hash_string_to_buffer_224,
-}
+// Context_256 is a SHA-224 or SHA-256 instance.
+Context_256 :: struct {
+	block:     [BLOCK_SIZE_256]byte,
+	h:         [8]u32,
+	bitlength: u64,
+	length:    u64,
+	md_bits:   int,
 
 
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-	return hash_bytes_256(transmute([]byte)(data))
+	is_initialized: bool,
 }
 }
 
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context_256
-	ctx.md_bits = 256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
+// Context_512 is a SHA-384, SHA-512 or SHA-512/256 instance.
+Context_512 :: struct {
+	block:     [BLOCK_SIZE_512]byte,
+	h:         [8]u64,
+	bitlength: u64,
+	length:    u64,
+	md_bits:   int,
 
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
+	is_initialized: bool,
 }
 
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: Context_256
-	ctx.md_bits = 256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
+// init_224 initializes a Context_256 for SHA-224.
+init_224 :: proc(ctx: ^Context_256) {
+	ctx.md_bits = 224
+	_init(ctx)
 }
 
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context_256
+// init_256 initializes a Context_256 for SHA-256.
+init_256 :: proc(ctx: ^Context_256) {
 	ctx.md_bits = 256
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_init(ctx)
 }
 
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-	if !load_at_once {
-		return hash_stream_256(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_256(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-	hash_stream_256,
-	hash_file_256,
-	hash_bytes_256,
-	hash_string_256,
-	hash_bytes_to_buffer_256,
-	hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-	return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context_512
-	ctx.md_bits = 384
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-	ctx: Context_512
-	ctx.md_bits = 384
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context_512
+// init_384 initializes a Context_512 for SHA-384.
+init_384 :: proc(ctx: ^Context_512) {
 	ctx.md_bits = 384
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-	if !load_at_once {
-		return hash_stream_384(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_384(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_384]byte{}, false
+	_init(ctx)
 }
 
-hash_384 :: proc {
-	hash_stream_384,
-	hash_file_384,
-	hash_bytes_384,
-	hash_string_384,
-	hash_bytes_to_buffer_384,
-	hash_string_to_buffer_384,
-}
-
-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-	return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context_512
-	ctx.md_bits = 512
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-	ctx: Context_512
+// init_512 initializes a Context_512 for SHA-512.
+init_512 :: proc(ctx: ^Context_512) {
 	ctx.md_bits = 512
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
+	_init(ctx)
 }
 
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context_512
-	ctx.md_bits = 512
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-	if !load_at_once {
-		return hash_stream_512(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_512(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_512]byte{}, false
-}
-
-hash_512 :: proc {
-	hash_stream_512,
-	hash_file_512,
-	hash_bytes_512,
-	hash_string_512,
-	hash_bytes_to_buffer_512,
-	hash_string_to_buffer_512,
-}
-
-// hash_string_512_256 will hash the given input and return the
-// computed hash
-hash_string_512_256 :: proc(data: string) -> [DIGEST_SIZE_512_256]byte {
-	return hash_bytes_512_256(transmute([]byte)(data))
-}
-
-// hash_bytes_512_256 will hash the given input and return the
-// computed hash
-hash_bytes_512_256 :: proc(data: []byte) -> [DIGEST_SIZE_512_256]byte {
-	hash: [DIGEST_SIZE_512_256]byte
-	ctx: Context_512
-	ctx.md_bits = 256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_512_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512_256 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_512_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512_256 :: proc(data, hash: []byte) {
-	ctx: Context_512
-	ctx.md_bits = 256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_512_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512_256]byte, bool) {
-	hash: [DIGEST_SIZE_512_256]byte
-	ctx: Context_512
+// init_512_256 initializes a Context_512 for SHA-512/256.
+init_512_256 :: proc(ctx: ^Context_512) {
 	ctx.md_bits = 256
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_init(ctx)
 }
 
-// hash_file_512_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_512_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512_256]byte, bool) {
-	if !load_at_once {
-		return hash_stream_512_256(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_512_256(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_512_256]byte{}, false
-}
-
-hash_512_256 :: proc {
-	hash_stream_512_256,
-	hash_file_512_256,
-	hash_bytes_512_256,
-	hash_string_512_256,
-	hash_bytes_to_buffer_512_256,
-	hash_string_to_buffer_512_256,
-}
-
-/*
-    Low level API
-*/
-
-init :: proc(ctx: ^$T) {
+@(private)
+_init :: proc(ctx: ^$T) {
 	when T == Context_256 {
 		switch ctx.md_bits {
 		case 224:
@@ -497,13 +156,14 @@ init :: proc(ctx: ^$T) {
 	ctx.is_initialized = true
 }
 
+// update adds more data to the Context.
 update :: proc(ctx: ^$T, data: []byte) {
 	assert(ctx.is_initialized)
 
 	when T == Context_256 {
-		CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_256
 	} else when T == Context_512 {
-		CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_512
 	}
 
 	data := data
@@ -528,21 +188,34 @@ update :: proc(ctx: ^$T, data: []byte) {
 	}
 }
 
-final :: proc(ctx: ^$T, hash: []byte) {
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^$T, hash: []byte, finalize_clone: bool = false) {
 	assert(ctx.is_initialized)
 
 	if len(hash) * 8 < ctx.md_bits {
 		panic("crypto/sha2: invalid destination digest size")
 	}
 
+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: T
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
 	length := ctx.length
 
-	raw_pad: [SHA512_BLOCK_SIZE]byte
+	raw_pad: [BLOCK_SIZE_512]byte
 	when T == Context_256 {
-		CURR_BLOCK_SIZE :: SHA256_BLOCK_SIZE
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_256
 		pm_len := 8 // 64-bits for length
 	} else when T == Context_512 {
-		CURR_BLOCK_SIZE :: SHA512_BLOCK_SIZE
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_512
 		pm_len := 16 // 128-bits for length
 	}
 	pad := raw_pad[:CURR_BLOCK_SIZE]
@@ -576,37 +249,27 @@ final :: proc(ctx: ^$T, hash: []byte) {
 			endian.unchecked_put_u64be(hash[i * 8:], ctx.h[i])
 		}
 	}
-
-	ctx.is_initialized = false
 }
 
-/*
-    SHA2 implementation
-*/
-
-SHA256_BLOCK_SIZE :: 64
-SHA512_BLOCK_SIZE :: 128
-
-Context_256 :: struct {
-	block:     [SHA256_BLOCK_SIZE]byte,
-	h:         [8]u32,
-	bitlength: u64,
-	length:    u64,
-	md_bits:   int,
-
-	is_initialized: bool,
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^$T) {
+	ctx^ = other^
 }
 
-Context_512 :: struct {
-	block:     [SHA512_BLOCK_SIZE]byte,
-	h:         [8]u64,
-	bitlength: u64,
-	length:    u64,
-	md_bits:   int,
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^$T) {
+	if !ctx.is_initialized {
+		return
+	}
 
 
-	is_initialized: bool,
+	mem.zero_explicit(ctx, size_of(ctx^))
 }
 }
 
+    SHA2 implementation
+*/
+
 @(private)
 @(private)
 sha256_k := [64]u32 {
 	0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
 		w: [64]u32
 		w: [64]u32
 		wv: [8]u32
 		t1, t2: u32
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_256
 	} else when T == Context_512 {
 	} else when T == Context_512 {
 		w: [80]u64
 		wv: [8]u64
 		t1, t2: u64
+		CURR_BLOCK_SIZE :: BLOCK_SIZE_512
 	}
 	}
 
 	data := data
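
Not part of this commit: a sketch of the renamed initializers in use.  SHA-384, SHA-512, and SHA-512/256 all share Context_512; the input is a placeholder.

package sha2_example

import "core:crypto/sha2"
import "core:fmt"

main :: proc() {
	msg := "abc"

	ctx: sha2.Context_512
	sha2.init_512_256(&ctx) // 512-bit state, 32-byte digest
	sha2.update(&ctx, transmute([]byte)msg)

	digest: [sha2.DIGEST_SIZE_512_256]byte
	sha2.final(&ctx, digest[:])
	fmt.printf("SHA-512/256: %x\n", digest)
}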

+ 56 - 324
core/crypto/sha3/sha3.odin

@@ -1,3 +1,13 @@
+/*
+package sha3 implements the SHA3 hash algorithm family.
+
+The SHAKE XOF can be found in crypto/shake.  While discouraged, if the
+pre-standardization Keccak algorithm is required, it can be found in
+crypto/legacy/keccak.
+
+See:
+- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
+*/
 package sha3
 
 /*
@@ -6,359 +16,81 @@ package sha3
 
     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Interface for the SHA3 hashing algorithm. The SHAKE functionality can be found in package shake.
-    If you wish to compute a Keccak hash, you can use the keccak package, it will use the original padding.
 */
 
-import "core:io"
-import "core:os"
-
 import "../_sha3"
 import "../_sha3"
 
 
-/*
-    High level API
-*/
-
+// DIGEST_SIZE_224 is the SHA3-224 digest size.
 DIGEST_SIZE_224 :: 28
+// DIGEST_SIZE_256 is the SHA3-256 digest size.
 DIGEST_SIZE_256 :: 32
+// DIGEST_SIZE_384 is the SHA3-384 digest size.
 DIGEST_SIZE_384 :: 48
+// DIGEST_SIZE_512 is the SHA3-512 digest size.
 DIGEST_SIZE_512 :: 64
 
-// hash_string_224 will hash the given input and return the
-// computed hash
-hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte {
-	return hash_bytes_224(transmute([]byte)(data))
-}
+// BLOCK_SIZE_224 is the SHA3-224 block size in bytes.
+BLOCK_SIZE_224 :: _sha3.RATE_224
+// BLOCK_SIZE_256 is the SHA3-256 block size in bytes.
+BLOCK_SIZE_256 :: _sha3.RATE_256
+// BLOCK_SIZE_384 is the SHA3-384 block size in bytes.
+BLOCK_SIZE_384 :: _sha3.RATE_384
+// BLOCK_SIZE_512 is the SHA3-512 block size in bytes.
+BLOCK_SIZE_512 :: _sha3.RATE_512
 
-// hash_bytes_224 will hash the given input and return the
-// computed hash
-hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_224
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
+// Context is a SHA3 instance.
+Context :: distinct _sha3.Context
 
-// hash_string_to_buffer_224 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_224 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_224(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_224 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_224 :: proc(data, hash: []byte) {
-	ctx: Context
+// init_224 initializes a Context for SHA3-224.
+init_224 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_224
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_224 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) {
-	hash: [DIGEST_SIZE_224]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_224
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_init(ctx)
 }
 
-// hash_file_224 will read the file provided by the given handle
-// and compute a hash
-hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) {
-	if !load_at_once {
-		return hash_stream_224(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_224(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_224]byte{}, false
-}
-
-hash_224 :: proc {
-	hash_stream_224,
-	hash_file_224,
-	hash_bytes_224,
-	hash_string_224,
-	hash_bytes_to_buffer_224,
-	hash_string_to_buffer_224,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-	return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
+// init_256 initializes a Context for SHA3-256.
+init_256 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-	if !load_at_once {
-		return hash_stream_256(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_256(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_256]byte{}, false
+	_init(ctx)
 }
 
-hash_256 :: proc {
-	hash_stream_256,
-	hash_file_256,
-	hash_bytes_256,
-	hash_string_256,
-	hash_bytes_to_buffer_256,
-	hash_string_to_buffer_256,
-}
-
-// hash_string_384 will hash the given input and return the
-// computed hash
-hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte {
-	return hash_bytes_384(transmute([]byte)(data))
-}
-
-// hash_bytes_384 will hash the given input and return the
-// computed hash
-hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_384
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_384 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_384 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_384(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_384 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_384 :: proc(data, hash: []byte) {
-	ctx: Context
+// init_384 initializes a Context for SHA3-384.
+init_384 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_384
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_384 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) {
-	hash: [DIGEST_SIZE_384]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_384
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_384 will read the file provided by the given handle
-// and compute a hash
-hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) {
-	if !load_at_once {
-		return hash_stream_384(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_384(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_384]byte{}, false
-}
-
-hash_384 :: proc {
-	hash_stream_384,
-	hash_file_384,
-	hash_bytes_384,
-	hash_string_384,
-	hash_bytes_to_buffer_384,
-	hash_string_to_buffer_384,
+	_init(ctx)
 }

-// hash_string_512 will hash the given input and return the
-// computed hash
-hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
-	return hash_bytes_512(transmute([]byte)(data))
-}
-
-// hash_bytes_512 will hash the given input and return the
-// computed hash
-hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_512
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_512 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_512 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_512
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_512 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
-	hash: [DIGEST_SIZE_512]byte
-	ctx: Context
+// init_512 initializes a Context for SHA3-512.
+init_512 :: proc(ctx: ^Context) {
 	ctx.mdlen = DIGEST_SIZE_512
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+	_init(ctx)
 }

-// hash_file_512 will read the file provided by the given handle
-// and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
-	if !load_at_once {
-		return hash_stream_512(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_512(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_512]byte{}, false
+@(private)
+_init :: proc(ctx: ^Context) {
+	_sha3.init(transmute(^_sha3.Context)(ctx))
 }

-hash_512 :: proc {
-	hash_stream_512,
-	hash_file_512,
-	hash_bytes_512,
-	hash_string_512,
-	hash_bytes_to_buffer_512,
-	hash_string_to_buffer_512,
+// update adds more data to the Context.
+update :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(transmute(^_sha3.Context)(ctx), data)
 }

-/*
-    Low level API
-*/
-
-Context :: _sha3.Sha3_Context
-
-init :: proc(ctx: ^Context) {
-	_sha3.init(ctx)
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
+	_sha3.final(transmute(^_sha3.Context)(ctx), hash, finalize_clone)
 }

-update :: proc(ctx: ^Context, data: []byte) {
-	_sha3.update(ctx, data)
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
 }

-final :: proc(ctx: ^Context, hash: []byte) {
-	_sha3.final(ctx, hash)
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	_sha3.reset(transmute(^_sha3.Context)(ctx))
 }

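Taken together, the reworked sha3 API replaces the removed one-shot helpers with an explicit init/update/final flow. A minimal usage sketch, assuming the usual core:crypto/sha3 import path (the message value is illustrative):

	import "core:crypto/sha3"

	main :: proc() {
		msg := "The quick brown fox jumps over the lazy dog"

		ctx: sha3.Context
		sha3.init_256(&ctx)                     // select SHA3-256
		sha3.update(&ctx, transmute([]byte)msg) // absorb; may be called repeatedly
		digest: [sha3.DIGEST_SIZE_256]byte
		sha3.final(&ctx, digest[:])             // write the digest and reset ctx
	}
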
+ 40 - 178
core/crypto/shake/shake.odin

@@ -1,3 +1,11 @@
+/*
+package shake implements the SHAKE XOF algorithm family.
+
+The SHA3 hash algorithm can be found in the crypto/sha3 package.
+
+See:
+- https://nvlpubs.nist.gov/nistpubs/fips/nist.fips.202.pdf
+*/
 package shake

 /*
@@ -6,201 +14,55 @@ package shake

     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Interface for the SHAKE hashing algorithm.
-    The SHA3 functionality can be found in package sha3.
-
-    TODO: This should provide an incremental squeeze interface, in addition
-    to the one-shot final call.
 */

-import "core:io"
-import "core:os"
-
 import "../_sha3"

-/*
-    High level API
-*/
-
-DIGEST_SIZE_128 :: 16
-DIGEST_SIZE_256 :: 32
-
-// hash_string_128 will hash the given input and return the
-// computed hash
-hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
-	return hash_bytes_128(transmute([]byte)(data))
-}
-
-// hash_bytes_128 will hash the given input and return the
-// computed hash
-hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
-	hash: [DIGEST_SIZE_128]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_128
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer_128 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer_128 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_128
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream_128 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
-	hash: [DIGEST_SIZE_128]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_128
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
+// Context is a SHAKE128 or SHAKE256 instance.
+Context :: distinct _sha3.Context
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
-
-// hash_file_128 will read the file provided by the given handle
-// and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
-	if !load_at_once {
-		return hash_stream_128(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_128(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE_128]byte{}, false
-}
-
-hash_128 :: proc {
-	hash_stream_128,
-	hash_file_128,
-	hash_bytes_128,
-	hash_string_128,
-	hash_bytes_to_buffer_128,
-	hash_string_to_buffer_128,
-}
-
-// hash_string_256 will hash the given input and return the
-// computed hash
-hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte {
-	return hash_bytes_256(transmute([]byte)(data))
-}
-
-// hash_bytes_256 will hash the given input and return the
-// computed hash
-hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
+// init_128 initializes a Context for SHAKE128.
+init_128 :: proc(ctx: ^Context) {
+	ctx.mdlen = 128 / 8
+	_init(ctx)
 }

-// hash_string_to_buffer_256 will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer_256 :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer_256(transmute([]byte)(data), hash)
+// init_256 initializes a Context for SHAKE256.
+init_256 :: proc(ctx: ^Context) {
+	ctx.mdlen = 256 / 8
+	_init(ctx)
 }

-// hash_bytes_to_buffer_256 will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer_256 :: proc(data, hash: []byte) {
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
+@(private)
+_init :: proc(ctx: ^Context) {
+	_sha3.init(transmute(^_sha3.Context)(ctx))
 }

-// hash_stream_256 will read the stream in chunks and compute a
-// hash from its contents
-hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) {
-	hash: [DIGEST_SIZE_256]byte
-	ctx: Context
-	ctx.mdlen = DIGEST_SIZE_256
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
+// write writes more data into the SHAKE instance.  This MUST not be called
+// after any reads have been done, and attempts to do so will panic.
+write :: proc(ctx: ^Context, data: []byte) {
+	_sha3.update(transmute(^_sha3.Context)(ctx), data)
 }

-// hash_file_256 will read the file provided by the given handle
-// and compute a hash
-hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) {
-	if !load_at_once {
-		return hash_stream_256(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes_256(buf[:]), ok
-		}
+// read reads output from the SHAKE instance.  There is no practical upper
+// limit to the amount of data that can be read from SHAKE.  After read has
+// been called one or more times, further calls to write will panic.
+read :: proc(ctx: ^Context, dst: []byte) {
+	ctx_ := transmute(^_sha3.Context)(ctx)
+	if !ctx.is_finalized {
+		_sha3.shake_xof(ctx_)
 	}
-	return [DIGEST_SIZE_256]byte{}, false
-}
-
-hash_256 :: proc {
-	hash_stream_256,
-	hash_file_256,
-	hash_bytes_256,
-	hash_string_256,
-	hash_bytes_to_buffer_256,
-	hash_string_to_buffer_256,
-}
-
-/*
-    Low level API
-*/
-
-Context :: _sha3.Sha3_Context
-init :: proc(ctx: ^Context) {
-	_sha3.init(ctx)
+	_sha3.shake_out(ctx_, dst)
 }

-update :: proc(ctx: ^Context, data: []byte) {
-	_sha3.update(ctx, data)
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	_sha3.clone(transmute(^_sha3.Context)(ctx), transmute(^_sha3.Context)(other))
 }

-final :: proc(ctx: ^Context, hash: []byte) {
-	_sha3.shake_xof(ctx)
-	_sha3.shake_out(ctx, hash[:])
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	_sha3.reset(transmute(^_sha3.Context)(ctx))
 }

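The write/read split turns SHAKE into a proper incremental XOF: absorb with write, then squeeze any amount of output with read. A minimal sketch, assuming the usual core:crypto/shake import path (message and output length are illustrative):

	import "core:crypto/shake"

	main :: proc() {
		msg := "hello"

		ctx: shake.Context
		shake.init_128(&ctx)                    // SHAKE128
		shake.write(&ctx, transmute([]byte)msg) // absorb; must come before any read
		out: [64]byte                           // an XOF has no fixed digest size
		shake.read(&ctx, out[:])                // squeeze; may be called repeatedly
	}
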
+ 47 - 98
core/crypto/sm3/sm3.odin

@@ -1,3 +1,9 @@
+/*
+package sm3 implements the SM3 hash algorithm.
+
+See:
+- https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
+*/
 package sm3

 /*
@@ -6,102 +12,29 @@ package sm3

     List of contributors:
         zhibog, dotbmp:  Initial implementation.
-
-    Implementation of the SM3 hashing algorithm, as defined in <https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02>
 */

 import "core:encoding/endian"
-import "core:io"
 import "core:math/bits"
-import "core:os"
-
-/*
-    High level API
-*/
+import "core:mem"
+// DIGEST_SIZE is the SM3 digest size in bytes.
 DIGEST_SIZE :: 32

-// hash_string will hash the given input and return the
-// computed hash
-hash_string :: proc(data: string) -> [DIGEST_SIZE]byte {
-	return hash_bytes(transmute([]byte)(data))
-}
-
-// hash_bytes will hash the given input and return the
-// computed hash
-hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash[:])
-	return hash
-}
-
-// hash_string_to_buffer will hash the given input and assign the
-// computed hash to the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_string_to_buffer :: proc(data: string, hash: []byte) {
-	hash_bytes_to_buffer(transmute([]byte)(data), hash)
-}
-
-// hash_bytes_to_buffer will hash the given input and write the
-// computed hash into the second parameter.
-// It requires that the destination buffer is at least as big as the digest size
-hash_bytes_to_buffer :: proc(data, hash: []byte) {
-	ctx: Context
-	init(&ctx)
-	update(&ctx, data)
-	final(&ctx, hash)
-}
-
-// hash_stream will read the stream in chunks and compute a
-// hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
-	hash: [DIGEST_SIZE]byte
-	ctx: Context
-	init(&ctx)
-
-	buf := make([]byte, 512)
-	defer delete(buf)
-
-	read := 1
-	for read > 0 {
-		read, _ = io.read(s, buf)
-		if read > 0 {
-			update(&ctx, buf[:read])
-		}
-	}
-	final(&ctx, hash[:])
-	return hash, true
-}
+// BLOCK_SIZE is the SM3 block size in bytes.
+BLOCK_SIZE :: 64
-// hash_file will read the file provided by the given handle
-// and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
-	if !load_at_once {
-		return hash_stream(os.stream_from_handle(hd))
-	} else {
-		if buf, ok := os.read_entire_file(hd); ok {
-			return hash_bytes(buf[:]), ok
-		}
-	}
-	return [DIGEST_SIZE]byte{}, false
-}
+// Context is a SM3 instance.
+Context :: struct {
+	state:     [8]u32,
+	x:         [BLOCK_SIZE]byte,
+	bitlength: u64,
+	length:    u64,
-hash :: proc {
-	hash_stream,
-	hash_file,
-	hash_bytes,
-	hash_string,
-	hash_bytes_to_buffer,
-	hash_string_to_buffer,
+	is_initialized: bool,
 }

-/*
-    Low level API
-*/
-
+// init initializes a Context.
 init :: proc(ctx: ^Context) {
 	ctx.state[0] = IV[0]
 	ctx.state[1] = IV[1]
@@ -118,6 +51,7 @@ init :: proc(ctx: ^Context) {
 	ctx.is_initialized = true
 }

+// update adds more data to the Context.
 update :: proc(ctx: ^Context, data: []byte) {
 	assert(ctx.is_initialized)

@@ -143,13 +77,26 @@ update :: proc(ctx: ^Context, data: []byte) {
 	}
 }

-final :: proc(ctx: ^Context, hash: []byte) {
+// final finalizes the Context, writes the digest to hash, and calls
+// reset on the Context.
+//
+// Iff finalize_clone is set, final will work on a copy of the Context,
+// which is useful for calculating rolling digests.
+final :: proc(ctx: ^Context, hash: []byte, finalize_clone: bool = false) {
 	assert(ctx.is_initialized)

 	if len(hash) < DIGEST_SIZE {
 		panic("crypto/sm3: invalid destination digest size")
 	}

+	ctx := ctx
+	if finalize_clone {
+		tmp_ctx: Context
+		clone(&tmp_ctx, ctx)
+		ctx = &tmp_ctx
+	}
+	defer(reset(ctx))
+
 	length := ctx.length

 	pad: [BLOCK_SIZE]byte
@@ -168,25 +115,27 @@ final :: proc(ctx: ^Context, hash: []byte) {
 	for i := 0; i < DIGEST_SIZE / 4; i += 1 {
 		endian.unchecked_put_u32be(hash[i * 4:], ctx.state[i])
 	}
+}
-	ctx.is_initialized = false
+// clone clones the Context other into ctx.
+clone :: proc(ctx, other: ^Context) {
+	ctx^ = other^
+}
+
+// reset sanitizes the Context.  The Context must be re-initialized to
+// be used again.
+reset :: proc(ctx: ^Context) {
+	if !ctx.is_initialized {
+		return
+	}
+
+	mem.zero_explicit(ctx, size_of(ctx^))
 }

 /*
     SM3 implementation
 */

-BLOCK_SIZE :: 64
-
-Context :: struct {
-	state:     [8]u32,
-	x:         [BLOCK_SIZE]byte,
-	bitlength: u64,
-	length:    u64,
-
-	is_initialized: bool,
-}
-
 @(private)
 IV := [8]u32 {
 	0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600,

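The finalize_clone flag is what makes rolling digests possible: finalizing a clone leaves the original Context open for further updates. A minimal sketch, assuming the usual core:crypto/sm3 import path (the inputs are illustrative):

	import "core:crypto/sm3"

	main :: proc() {
		part1 := "hello, "
		part2 := "world"

		ctx: sm3.Context
		sm3.init(&ctx)
		sm3.update(&ctx, transmute([]byte)part1)

		mid: [sm3.DIGEST_SIZE]byte
		sm3.final(&ctx, mid[:], true) // finalizes a clone; ctx keeps absorbing

		sm3.update(&ctx, transmute([]byte)part2)
		full: [sm3.DIGEST_SIZE]byte
		sm3.final(&ctx, full[:])      // finalizes and resets ctx itself
	}
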
+ 18 - 25
core/dynlib/lib.odin

@@ -123,40 +123,34 @@ Returns:

 See doc.odin for an example.
 */
-initialize_symbols :: proc(symbol_table: ^$T, library_path: string, symbol_prefix := "", handle_field_name := "__handle") -> (count: int, ok: bool) where intrinsics.type_is_struct(T) {
+initialize_symbols :: proc(
+	symbol_table: ^$T, library_path: string,
+	symbol_prefix := "", handle_field_name := "__handle",
+) -> (count: int = -1, ok: bool = false) where intrinsics.type_is_struct(T) {
 	assert(symbol_table != nil)
-	handle: Library
-	if handle, ok = load_library(library_path); !ok {
-		return -1, false
-	}
-
-	// `symbol_table` must be a struct because of the where clause, so this can't fail.
-	ti := runtime.type_info_base(type_info_of(T))
-	s, _ := ti.variant.(runtime.Type_Info_Struct)
+	handle := load_library(library_path) or_return

 	// Buffer to concatenate the prefix + symbol name.
 	prefixed_symbol_buf: [2048]u8 = ---

-	sym_ptr: rawptr
-	for field_name, i in s.names {
+	count = 0
+	for field, i in reflect.struct_fields_zipped(T) {
 		// Calculate address of struct member
-		field_ptr := rawptr(uintptr(rawptr(symbol_table)) + uintptr(s.offsets[i]))
+		field_ptr := rawptr(uintptr(symbol_table) + field.offset)

 		// If we've come across the struct member for the handle, store it and continue scanning for other symbols.
-		if field_name == handle_field_name {
+		if field.name == handle_field_name {
 			// We appear to be hot reloading. Unload previous incarnation of the library.
 			if old_handle := (^Library)(field_ptr)^; old_handle != nil {
-				if ok = unload_library(old_handle); !ok {
-					return count, ok
-				}
+				unload_library(old_handle) or_return
 			}
 			(^Library)(field_ptr)^ = handle
 			continue
 		}

 		// We're not the library handle, so the field needs to be a pointer type, be it a procedure pointer or an exported global.
-		if !(reflect.is_procedure(s.types[i]) || reflect.is_pointer(s.types[i])) {
+		if !(reflect.is_procedure(field.type) || reflect.is_pointer(field.type)) {
 			continue
 		}

@@ -164,22 +158,21 @@ initialize_symbols :: proc(symbol_table: ^$T, library_path: string, symbol_prefi
 		prefixed_name: string

 		// Do we have a symbol override tag?
-		if override, tag_ok := reflect.struct_tag_lookup(reflect.Struct_Tag(s.tags[i]), "dynlib"); tag_ok {
-			prefixed_name = string(override)
+		if override, tag_ok := reflect.struct_tag_lookup(field.tag, "dynlib"); tag_ok {
+			prefixed_name = override
 		}

 		// No valid symbol override tag found, fall back to `<symbol_prefix>name`.
 		if len(prefixed_name) == 0 {
 			offset := copy(prefixed_symbol_buf[:], symbol_prefix)
-			copy(prefixed_symbol_buf[offset:], field_name)
-			prefixed_name = string(prefixed_symbol_buf[:len(symbol_prefix) + len(field_name)])
+			copy(prefixed_symbol_buf[offset:], field.name)
+			prefixed_name = string(prefixed_symbol_buf[:len(symbol_prefix) + len(field.name)])
 		}

 		// Assign procedure (or global) pointer if found.
-		if sym_ptr, ok = symbol_address(handle, prefixed_name); ok {
-			(^rawptr)(field_ptr)^ = sym_ptr
-			count += 1
-		}
+		sym_ptr := symbol_address(handle, prefixed_name) or_continue
+		(^rawptr)(field_ptr)^ = sym_ptr
+		count += 1
 	}
 	return count, count > 0
 }

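For context, this is the shape of symbol table the reworked initialize_symbols consumes; the library name, prefix, and fields below are illustrative, not part of the commit:

	import "core:dynlib"
	import "core:fmt"

	Foo_API :: struct {
		add:      proc "c" (a, b: i32) -> i32,                          // looked up as "foo_add"
		version:  proc "c" () -> cstring `dynlib:"foo_version_string"`, // tag overrides the name
		__handle: dynlib.Library,                                       // receives (and hot-reloads) the handle
	}

	main :: proc() {
		api: Foo_API
		count, ok := dynlib.initialize_symbols(&api, "libfoo.so", "foo_")
		fmt.println(count, ok) // count = symbols resolved; ok = at least one resolved
	}
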
+ 25 - 0
core/math/linalg/general.odin

@@ -267,6 +267,31 @@ to_ptr :: proc{vector_to_ptr, matrix_to_ptr}



+vector_angle_between :: proc "contextless" (a, b: $V/[$N]$E) -> E {
+	a0 := normalize0(a)
+	b0 := normalize0(b)
+	return math.acos(dot(a0, b0))
+}
+quaternion64_angle_between :: proc "contextless" (a, b: $Q/quaternion64) -> f16 {
+	c := normalize0(conj(a) * b)
+	return math.acos(c.w)
+}
+quaternion128_angle_between :: proc "contextless" (a, b: $Q/quaternion128) -> f32 {
+	c := normalize0(conj(a) * b)
+	return math.acos(c.w)
+}
+quaternion256_angle_between :: proc "contextless" (a, b: $Q/quaternion256) -> f64 {
+	c := normalize0(conj(a) * b)
+	return math.acos(c.w)
+}
+angle_between :: proc{
+	vector_angle_between,
+	quaternion64_angle_between,
+	quaternion128_angle_between,
+	quaternion256_angle_between,
+}
+
+
 

 // Splines

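angle_between reduces to the arc cosine of the dot product of the normalized inputs, so two unit axes give a right angle. A small sketch (values are illustrative):

	import "core:fmt"
	import "core:math/linalg"

	main :: proc() {
		a := [3]f32{1, 0, 0}
		b := [3]f32{0, 1, 0}
		fmt.println(linalg.angle_between(a, b)) // acos(0) ≈ 1.5708 (π/2)
	}
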
+ 37 - 0
core/math/linalg/specific.odin

@@ -1270,6 +1270,43 @@ matrix2_adjoint :: proc{
 }


+@(require_results)
+matrix2_rotate_f16 :: proc "contextless" (angle_radians: f16) -> Matrix2f16 {
+	c := math.cos(angle_radians)
+	s := math.sin(angle_radians)
+
+	return Matrix2f16{
+		c, -s,
+		s,  c,
+	}
+}
+@(require_results)
+matrix2_rotate_f32 :: proc "contextless" (angle_radians: f32) -> Matrix2f32 {
+	c := math.cos(angle_radians)
+	s := math.sin(angle_radians)
+
+	return Matrix2f32{
+		c, -s,
+		s,  c,
+	}
+}
+@(require_results)
+matrix2_rotate_f64 :: proc "contextless" (angle_radians: f64) -> Matrix2f64 {
+	c := math.cos(angle_radians)
+	s := math.sin(angle_radians)
+
+	return Matrix2f64{
+		c, -s,
+		s,  c,
+	}
+}
+matrix2_rotate :: proc{
+	matrix2_rotate_f16,
+	matrix2_rotate_f32,
+	matrix2_rotate_f64,
+}
+
+
 @(require_results)
 @(require_results)
 matrix3_from_quaternion_f16 :: proc "contextless" (q: Quaternionf16) -> (m: Matrix3f16) {
 	qxx := q.x * q.x
+ 20 - 0
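matrix2_rotate builds the standard 2D rotation matrix {c, -s; s, c} for a counter-clockwise angle in radians. A small sketch (values are illustrative):

	import "core:fmt"
	import "core:math"
	import "core:math/linalg"

	main :: proc() {
		r := linalg.matrix2_rotate(f32(math.PI) / 2) // 90° counter-clockwise
		v := r * [2]f32{1, 0}                        // rotates the x-axis onto the y-axis
		fmt.println(v)                               // approximately [0, 1]
	}
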
core/math/rand/rand.odin

@@ -834,3 +834,23 @@ choice :: proc(array: $T/[]$E, r: ^Rand = nil) -> (res: E) {
 	}
 	return array[int63_max(n, r)]
 }
+
+
+@(require_results)
+choice_enum :: proc($T: typeid, r: ^Rand = nil) -> T
+	where
+		intrinsics.type_is_enum(T),
+		size_of(T) <= 8,
+		len(T) == cap(T) /* Only allow contiguous enum types */
+{
+	when intrinsics.type_is_unsigned(intrinsics.type_core_type(T)) &&
+	     u64(max(T)) > u64(max(i64)) {
+		i := uint64(r) % u64(len(T))
+		i += u64(min(T))
+		return T(i)
+	} else {
+		i := int63_max(i64(len(T)), r)
+		i += i64(min(T))
+		return T(i)
+	}
+}

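choice_enum complements choice for contiguous enum types: it draws a uniform index over len(T) and offsets it by min(T). A small sketch (the enum is illustrative):

	import "core:fmt"
	import "core:math/rand"

	Direction :: enum {North, East, South, West}

	main :: proc() {
		fmt.println(rand.choice_enum(Direction)) // uniformly one of the four values
	}
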
+ 2 - 2
core/mem/virtual/arena.odin

@@ -143,8 +143,8 @@ arena_static_reset_to :: proc(arena: ^Arena, pos: uint, loc := #caller_location)
 		prev_pos := arena.curr_block.used
 		arena.curr_block.used = clamp(pos, 0, arena.curr_block.reserved)

-		if prev_pos < pos {
-			mem.zero_slice(arena.curr_block.base[arena.curr_block.used:][:pos-prev_pos])
+		if prev_pos > pos {
+			mem.zero_slice(arena.curr_block.base[arena.curr_block.used:][:prev_pos-pos])
 		}
 		arena.total_used = arena.curr_block.used
 		return true

+ 46 - 11
core/os/os_darwin.odin

@@ -527,6 +527,7 @@ get_last_error_string :: proc() -> string {
 	return cast(string)_darwin_string_error(cast(c.int)get_last_error())
 }

+
 open :: proc(path: string, flags: int = O_RDWR, mode: int = 0) -> (Handle, Errno) {
 	isDir := is_dir_path(path)
 	flags := flags
@@ -568,15 +569,24 @@ close :: proc(fd: Handle) -> bool {
 	return _unix_close(fd) == 0
 }

+// If you read or write more than `SSIZE_MAX` bytes, most Darwin implementations will return `EINVAL`
+// but it is really implementation defined. `SSIZE_MAX` is also implementation defined but usually
+// the max of an i32 on Darwin.
+// In practice a read/write call would probably never read/write these big buffers all at once,
+// which is why the number of bytes is returned and why there are procs that will call this in a
+// loop for you.
+// We set a max of 1GB to keep alignment and to be safe.
 @(private)
-MAX_RW :: 0x7fffffff // The limit on Darwin is max(i32), trying to read/write more than that fails.
+MAX_RW :: 1 << 30

 write :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}

-	bytes_written := _unix_write(fd, raw_data(data), c.size_t(len(data)))
+	to_write := min(c.size_t(len(data)), MAX_RW)
+
+	bytes_written := _unix_write(fd, raw_data(data), to_write)
 	if bytes_written < 0 {
 		return -1, Errno(get_last_error())
 	}
@@ -588,18 +598,23 @@ read :: proc(fd: Handle, data: []u8) -> (int, Errno) {
 		return 0, ERROR_NONE
 	}

-	bytes_read := _unix_read(fd, raw_data(data), c.size_t(len(data)))
+	to_read := min(c.size_t(len(data)), MAX_RW)
+
+	bytes_read := _unix_read(fd, raw_data(data), to_read)
 	if bytes_read < 0 {
 		return -1, Errno(get_last_error())
 	}
 	return bytes_read, ERROR_NONE
 }
+
 read_at :: proc(fd: Handle, data: []byte, offset: i64) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}

-	bytes_read := _unix_pread(fd, raw_data(data), c.size_t(len(data)), offset)
+	to_read := min(c.size_t(len(data)), MAX_RW)
+
+	bytes_read := _unix_pread(fd, raw_data(data), to_read, offset)
 	if bytes_read < 0 {
 		return -1, Errno(get_last_error())
 	}
@@ -611,7 +626,9 @@ write_at :: proc(fd: Handle, data: []byte, offset: i64) -> (int, Errno) {
 		return 0, ERROR_NONE
 	}

-	bytes_written := _unix_pwrite(fd, raw_data(data), c.size_t(len(data)), offset)
+	to_write := min(c.size_t(len(data)), MAX_RW)
+
+	bytes_written := _unix_pwrite(fd, raw_data(data), to_write, offset)
 	if bytes_written < 0 {
 		return -1, Errno(get_last_error())
 	}
@@ -642,10 +659,24 @@ stdin:  Handle = 0 // get_std_handle(win32.STD_INPUT_HANDLE);
 stdout: Handle = 1 // get_std_handle(win32.STD_OUTPUT_HANDLE);
 stderr: Handle = 2 // get_std_handle(win32.STD_ERROR_HANDLE);

-/* TODO(zangent): Implement these!
-last_write_time :: proc(fd: Handle) -> File_Time {}
-last_write_time_by_name :: proc(name: string) -> File_Time {}
-*/
+last_write_time :: proc(fd: Handle) -> (File_Time, Errno) {
+	s, err := _fstat(fd)
+	if err != ERROR_NONE {
+		return 0, err
+	}
+	modified := s.modified.seconds * 1_000_000_000 + s.modified.nanoseconds
+	return File_Time(modified), ERROR_NONE
+}
+
+last_write_time_by_name :: proc(name: string) -> (File_Time, Errno) {
+	s, err := _stat(name)
+	if err != ERROR_NONE {
+		return 0, err
+	}
+	modified := s.modified.seconds * 1_000_000_000 + s.modified.nanoseconds
+	return File_Time(modified), ERROR_NONE
+}
+

 is_path_separator :: proc(r: rune) -> bool {
 	return r == '/'
@@ -713,10 +744,14 @@ rename :: proc(old: string, new: string) -> bool {
 	return _unix_rename(old_cstr, new_cstr) != -1
 }

-remove :: proc(path: string) -> bool {
+remove :: proc(path: string) -> Errno {
 	runtime.DEFAULT_TEMP_ALLOCATOR_TEMP_GUARD()
 	path_cstr := strings.clone_to_cstring(path, context.temp_allocator)
-	return _unix_remove(path_cstr) != -1
+	res := _unix_remove(path_cstr)
+	if res == -1 {
+		return Errno(get_last_error())
+	}
+	return ERROR_NONE
 }
 }

 @private

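Since a single read/write call is now capped at MAX_RW bytes (and may return a short count anyway), callers that need a whole buffer processed should loop on the returned count, as the comment in the diff suggests. A minimal sketch of such a loop; read_full is a hypothetical helper, not part of this commit:

	import "core:os"

	read_full :: proc(fd: os.Handle, buf: []byte) -> (n: int, err: os.Errno) {
		for n < len(buf) {
			m: int
			m, err = os.read(fd, buf[n:])
			if err != os.ERROR_NONE || m == 0 { // stop on error, or EOF when m == 0
				return
			}
			n += m
		}
		return
	}
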
+ 13 - 2
core/os/os_freebsd.odin

@@ -326,8 +326,17 @@ close :: proc(fd: Handle) -> Errno {
 	return ERROR_NONE
 }

+// If you read or write more than `INT_MAX` bytes, FreeBSD returns `EINVAL`.
+// In practice a read/write call would probably never read/write these big buffers all at once,
+// which is why the number of bytes is returned and why there are procs that will call this in a
+// loop for you.
+// We set a max of 1GB to keep alignment and to be safe.
+@(private)
+MAX_RW :: 1 << 30
+
 read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
-	bytes_read := _unix_read(fd, &data[0], c.size_t(len(data)))
+	to_read    := min(c.size_t(len(data)), MAX_RW)
+	bytes_read := _unix_read(fd, &data[0], to_read)
 	if bytes_read == -1 {
 		return -1, Errno(get_last_error())
 	}
@@ -338,7 +347,9 @@ write :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}
-	bytes_written := _unix_write(fd, &data[0], c.size_t(len(data)))
+
+	to_write      := min(c.size_t(len(data)), MAX_RW)
+	bytes_written := _unix_write(fd, &data[0], to_write)
 	if bytes_written == -1 {
 		return -1, Errno(get_last_error())
 	}

+ 22 - 4
core/os/os_linux.odin

@@ -569,12 +569,23 @@ close :: proc(fd: Handle) -> Errno {
 	return _get_errno(unix.sys_close(int(fd)))
 }

+// If you read or write more than `SSIZE_MAX` bytes, result is implementation defined (probably an error).
+// `SSIZE_MAX` is also implementation defined but usually the max of a `ssize_t` which is `max(int)` in Odin.
+// In practice a read/write call would probably never read/write these big buffers all at once,
+// which is why the number of bytes is returned and why there are procs that will call this in a
+// loop for you.
+// We set a max of 1GB to keep alignment and to be safe.
+@(private)
+MAX_RW :: 1 << 30
+
 read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}

-	bytes_read := unix.sys_read(int(fd), raw_data(data), len(data))
+	to_read := min(uint(len(data)), MAX_RW)
+
+	bytes_read := unix.sys_read(int(fd), raw_data(data), to_read)
 	if bytes_read < 0 {
 		return -1, _get_errno(bytes_read)
 	}
@@ -586,18 +597,23 @@ write :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 		return 0, ERROR_NONE
 	}

-	bytes_written := unix.sys_write(int(fd), raw_data(data), len(data))
+	to_write := min(uint(len(data)), MAX_RW)
+
+	bytes_written := unix.sys_write(int(fd), raw_data(data), to_write)
 	if bytes_written < 0 {
 		return -1, _get_errno(bytes_written)
 	}
 	return bytes_written, ERROR_NONE
 }
+
 read_at :: proc(fd: Handle, data: []byte, offset: i64) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}

-	bytes_read := unix.sys_pread(int(fd), raw_data(data), len(data), offset)
+	to_read := min(uint(len(data)), MAX_RW)
+
+	bytes_read := unix.sys_pread(int(fd), raw_data(data), to_read, offset)
 	if bytes_read < 0 {
 		return -1, _get_errno(bytes_read)
 	}
@@ -609,7 +625,9 @@ write_at :: proc(fd: Handle, data: []byte, offset: i64) -> (int, Errno) {
 		return 0, ERROR_NONE
 	}

-	bytes_written := unix.sys_pwrite(int(fd), raw_data(data), uint(len(data)), offset)
+	to_write := min(uint(len(data)), MAX_RW)
+
+	bytes_written := unix.sys_pwrite(int(fd), raw_data(data), to_write, offset)
 	if bytes_written < 0 {
 		return -1, _get_errno(bytes_written)
 	}

+ 13 - 2
core/os/os_openbsd.odin

@@ -325,8 +325,17 @@ close :: proc(fd: Handle) -> Errno {
 	return ERROR_NONE
 }

+// If you read or write more than `SSIZE_MAX` bytes, OpenBSD returns `EINVAL`.
+// In practice a read/write call would probably never read/write these big buffers all at once,
+// which is why the number of bytes is returned and why there are procs that will call this in a
+// loop for you.
+// We set a max of 1GB to keep alignment and to be safe.
+@(private)
+MAX_RW :: 1 << 30
+
 read :: proc(fd: Handle, data: []byte) -> (int, Errno) {
-	bytes_read := _unix_read(fd, &data[0], c.size_t(len(data)))
+	to_read    := min(c.size_t(len(data)), MAX_RW)
+	bytes_read := _unix_read(fd, &data[0], to_read)
 	if bytes_read == -1 {
 		return -1, Errno(get_last_error())
 	}
@@ -337,7 +346,9 @@ write :: proc(fd: Handle, data: []byte) -> (int, Errno) {
 	if len(data) == 0 {
 		return 0, ERROR_NONE
 	}
-	bytes_written := _unix_write(fd, &data[0], c.size_t(len(data)))
+
+	to_write      := min(c.size_t(len(data)), MAX_RW)
+	bytes_written := _unix_write(fd, &data[0], to_write)
 	if bytes_written == -1 {
 		return -1, Errno(get_last_error())
 	}

+ 13 - 0
core/os/stream.odin

@@ -27,19 +27,31 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
 	case .Read:
 		n_int, os_err = read(fd, p)
 		n = i64(n_int)
+		if n == 0 && os_err == 0 {
+			err = .EOF
+		}

 	case .Read_At:
 		when !(ODIN_OS == .FreeBSD || ODIN_OS == .OpenBSD) {
 			n_int, os_err = read_at(fd, p, offset)
 			n = i64(n_int)
+			if n == 0 && os_err == 0 {
+				err = .EOF
+			}
 		}
 	case .Write:
 		n_int, os_err = write(fd, p)
 		n = i64(n_int)
+		if n == 0 && os_err == 0 {
+			err = .EOF
+		}
 	case .Write_At:
 		when !(ODIN_OS == .FreeBSD || ODIN_OS == .OpenBSD) {
 			n_int, os_err = write_at(fd, p, offset)
 			n = i64(n_int)
+			if n == 0 && os_err == 0 {
+				err = .EOF
+			}
 		}
 	case .Seek:
 		n, os_err = seek(fd, offset, int(whence))
@@ -54,6 +66,7 @@ _file_stream_proc :: proc(stream_data: rawptr, mode: io.Stream_Mode, p: []byte,
 			return io.query_utility({.Close, .Flush, .Read, .Read_At, .Write, .Write_At, .Seek, .Size, .Query})
 		}
 	}
+
 	if err == nil && os_err != 0 {
 		when ODIN_OS == .Windows {
 			if os_err == ERROR_HANDLE_EOF {

+ 11 - 16
core/path/filepath/path.odin

@@ -356,28 +356,24 @@ Relative_Error :: enum {
 */
 rel :: proc(base_path, target_path: string, allocator := context.allocator) -> (string, Relative_Error) {
 	context.allocator = allocator
-	base_clean, target_clean := clean(base_path), clean(target_path)
-
-	delete_target := true
-	defer {
-		if delete_target {
-			delete(target_clean)
-		}
-		delete(base_clean)
-	}
+	base_clean   := clean(base_path,   allocator)
+	target_clean := clean(target_path, allocator)
+	defer delete(base_clean,   allocator)
+	defer delete(target_clean, allocator)

 	if strings.equal_fold(target_clean, base_clean) {
-		return strings.clone("."), .None
+		return strings.clone(".", allocator), .None
 	}

-	base_vol, target_vol := volume_name(base_path), volume_name(target_path)
-	base := base_clean[len(base_vol):]
+	base_vol   := volume_name(base_path)
+	target_vol := volume_name(target_path)
+	base   := base_clean  [len(base_vol):]
 	target := target_clean[len(target_vol):]
 	if base == "." {
 		base = ""
 	}

-	base_slashed := len(base) > 0 && base[0] == SEPARATOR
+	base_slashed   := len(base)   > 0 && base  [0] == SEPARATOR
 	target_slashed := len(target) > 0 && target[0] == SEPARATOR
 	if base_slashed != target_slashed || !strings.equal_fold(base_vol, target_vol) {
 		return "", .Cannot_Relate
@@ -413,7 +409,7 @@ rel :: proc(base_path, target_path: string, allocator := context.allocator) -> (
 		if tl != t0 {
 			size += 1 + tl - t0
 		}
-		buf := make([]byte, size)
+		buf := make([]byte, size, allocator)
 		n := copy(buf, "..")
 		for _ in 0..<seps {
 			buf[n] = SEPARATOR
@@ -427,8 +423,7 @@ rel :: proc(base_path, target_path: string, allocator := context.allocator) -> (
 		return string(buf), .None
 	}

-	delete_target = false
-	return target[t0:], .None
+	return strings.clone(target[t0:], allocator), .None
 }

 /*

+ 29 - 17
core/prof/spall/doc.odin

@@ -1,26 +1,38 @@
 /*
-import "core:prof/spall"
+	import "core:prof/spall"
-spall_ctx: spall.Context
-spall_buffer: spall.Buffer
+	spall_ctx: spall.Context
+	spall_buffer: spall.Buffer
-foo :: proc() {
-	spall.SCOPED_EVENT(&spall_ctx, &spall_buffer, #procedure)
-}
+	foo :: proc() {
+		spall.SCOPED_EVENT(&spall_ctx, &spall_buffer, #procedure)
+	}
-main :: proc() {
-    spall_ctx = spall.context_create("trace_test.spall")
-    defer spall.context_destroy(&spall_ctx)
+	main :: proc() {
+		spall_ctx = spall.context_create("trace_test.spall")
+		defer spall.context_destroy(&spall_ctx)
-    buffer_backing := make([]u8, spall.BUFFER_DEFAULT_SIZE)
-    spall_buffer = spall.buffer_create(buffer_backing)
-    defer spall.buffer_destroy(&spall_ctx, &spall_buffer)
+		buffer_backing := make([]u8, spall.BUFFER_DEFAULT_SIZE)
+		spall_buffer = spall.buffer_create(buffer_backing)
+		defer spall.buffer_destroy(&spall_ctx, &spall_buffer)
-    spall.SCOPED_EVENT(&spall_ctx, &spall_buffer, #procedure)
+		spall.SCOPED_EVENT(&spall_ctx, &spall_buffer, #procedure)
-    for i := 0; i < 9001; i += 1 {
-		foo()
-    }
-}
+		for i := 0; i < 9001; i += 1 {
+			foo()
+		}
+	}
+
+	// Automatic profiling of every procedure:
+
+	@(instrumentation_enter)
+	spall_enter :: proc "contextless" (proc_address, call_site_return_address: rawptr, loc: runtime.Source_Code_Location) {
+		spall._buffer_begin(&spall_ctx, &spall_buffer, "", "", loc)
+	}
+
+	@(instrumentation_exit)
+	spall_exit :: proc "contextless" (proc_address, call_site_return_address: rawptr, loc: runtime.Source_Code_Location) {
+		spall._buffer_end(&spall_ctx, &spall_buffer)
+	}
 */
 package spall

+ 25 - 10
core/prof/spall/spall.odin

@@ -3,7 +3,6 @@ package spall
 import "core:os"
 import "core:time"
 import "base:intrinsics"
-import "core:mem"

 // File Format

@@ -111,9 +110,10 @@ buffer_create :: proc(data: []byte, tid: u32 = 0, pid: u32 = 0) -> (buffer: Buff
 	return
 }

-buffer_flush :: proc(ctx: ^Context, buffer: ^Buffer) {
+@(no_instrumentation)
+buffer_flush :: proc "contextless" (ctx: ^Context, buffer: ^Buffer) #no_bounds_check /* bounds check would segfault instrumentation */ {
 	start := _trace_now(ctx)
-	os.write(ctx.fd, buffer.data[:buffer.head])
+	write(ctx.fd, buffer.data[:buffer.head])
 	buffer.head = 0
 	end := _trace_now(ctx)

@@ -140,15 +140,16 @@ _scoped_buffer_end :: proc(ctx: ^Context, buffer: ^Buffer, _, _: string, _ := #c
 	_buffer_end(ctx, buffer)
 }

-
+@(no_instrumentation)
 _trace_now :: proc "contextless" (ctx: ^Context) -> f64 {
 	if !ctx.precise_time {
-		return f64(time.tick_now()._nsec) / 1_000
+		return f64(tick_now()) / 1_000
 	}

 	return f64(intrinsics.read_cycle_counter())
 }

+@(no_instrumentation)
 _build_header :: proc "contextless" (buffer: []u8, timestamp_scale: f64) -> (header_size: int, ok: bool) #optional_ok {
 _build_header :: proc "contextless" (buffer: []u8, timestamp_scale: f64) -> (header_size: int, ok: bool) #optional_ok {
 	header_size = size_of(Manual_Header)
 	if header_size > len(buffer) {
@@ -164,7 +165,8 @@ _build_header :: proc "contextless" (buffer: []u8, timestamp_scale: f64) -> (hea
 	return
 }

-_build_begin :: proc "contextless" (buffer: []u8, name: string, args: string, ts: f64, tid: u32, pid: u32) -> (event_size: int, ok: bool) #optional_ok {
+@(no_instrumentation)
+_build_begin :: #force_inline proc "contextless" (buffer: []u8, name: string, args: string, ts: f64, tid: u32, pid: u32) -> (event_size: int, ok: bool) #optional_ok #no_bounds_check /* bounds check would segfault instrumentation */ {
 	ev := (^Begin_Event)(raw_data(buffer))
 	name_len := min(len(name), 255)
 	args_len := min(len(args), 255)
@@ -180,13 +182,14 @@ _build_begin :: proc "contextless" (buffer: []u8, name: string, args: string, ts
 	ev.ts   = f64le(ts)
 	ev.name_len = u8(name_len)
 	ev.args_len = u8(args_len)
-	mem.copy(raw_data(buffer[size_of(Begin_Event):]), raw_data(name), name_len)
-	mem.copy(raw_data(buffer[size_of(Begin_Event)+name_len:]), raw_data(args), args_len)
+	intrinsics.mem_copy_non_overlapping(raw_data(buffer[size_of(Begin_Event):]), raw_data(name), name_len)
+	intrinsics.mem_copy_non_overlapping(raw_data(buffer[size_of(Begin_Event)+name_len:]), raw_data(args), args_len)
 	ok = true

 	return
 }

+@(no_instrumentation)
 _build_end :: proc "contextless" (buffer: []u8, ts: f64, tid: u32, pid: u32) -> (event_size: int, ok: bool) #optional_ok {
 _build_end :: proc "contextless" (buffer: []u8, ts: f64, tid: u32, pid: u32) -> (event_size: int, ok: bool) #optional_ok {
 	ev := (^End_Event)(raw_data(buffer))
 	event_size = size_of(End_Event)
@@ -203,7 +206,8 @@ _build_end :: proc "contextless" (buffer: []u8, ts: f64, tid: u32, pid: u32) ->
 	return
 }

-_buffer_begin :: proc(ctx: ^Context, buffer: ^Buffer, name: string, args: string = "", location := #caller_location) {
+@(no_instrumentation)
+_buffer_begin :: proc "contextless" (ctx: ^Context, buffer: ^Buffer, name: string, args: string = "", location := #caller_location) #no_bounds_check /* bounds check would segfault instrumentation */ {
 	if buffer.head + BEGIN_EVENT_MAX > len(buffer.data) {
 		buffer_flush(ctx, buffer)
 	}
@@ -211,7 +215,8 @@ _buffer_begin :: proc(ctx: ^Context, buffer: ^Buffer, name: string, args: string
 	buffer.head += _build_begin(buffer.data[buffer.head:], name, args, _trace_now(ctx), buffer.tid, buffer.pid)
 }

-_buffer_end :: proc(ctx: ^Context, buffer: ^Buffer) {
+@(no_instrumentation)
+_buffer_end :: proc "contextless" (ctx: ^Context, buffer: ^Buffer) #no_bounds_check /* bounds check would segfault instrumentation */ {
 	ts := _trace_now(ctx)

 	if buffer.head + size_of(End_Event) > len(buffer.data) {
@@ -220,3 +225,13 @@ _buffer_end :: proc(ctx: ^Context, buffer: ^Buffer) {

 	buffer.head += _build_end(buffer.data[buffer.head:], ts, buffer.tid, buffer.pid)
 }
+
+@(no_instrumentation)
+write :: proc "contextless" (fd: os.Handle, buf: []byte) -> (n: int, err: os.Errno) {
+	return _write(fd, buf)
+}
+
+@(no_instrumentation)
+tick_now :: proc "contextless" () -> (ns: i64) {
+	return _tick_now()
+}

+ 36 - 0
core/prof/spall/spall_linux.odin

@@ -0,0 +1,36 @@
+//+private
+package spall
+
+// Only for types and constants.
+import "core:os"
+
+// Package is `//+no-instrumentation`, safe to use.
+import "core:sys/linux"
+
+MAX_RW :: 0x7fffffff
+
+@(no_instrumentation)
+_write :: proc "contextless" (fd: os.Handle, data: []byte) -> (n: int, err: os.Errno) #no_bounds_check /* bounds check would segfault instrumentation */ {
+	if len(data) == 0 {
+		return 0, os.ERROR_NONE
+	}
+	
+	for n < len(data) {
+		chunk := data[n:][:min(len(data)-n, MAX_RW)] // continue from what has already been written
+		written, errno := linux.write(linux.Fd(fd), chunk)
+		if errno != .NONE {
+			return n, os.Errno(errno)
+		}
+		n += written
+	}
+
+	return n, os.ERROR_NONE
+}
+
+CLOCK_MONOTONIC_RAW :: 4 // NOTE(tetra): "RAW" means: Not adjusted by NTP.
+
+@(no_instrumentation)
+_tick_now :: proc "contextless" () -> (ns: i64) {
+	t, _ := linux.clock_gettime(.MONOTONIC_RAW)
+	return i64(t.time_sec)*1e9 + i64(t.time_nsec)
+}

+ 57 - 0
core/prof/spall/spall_unix.odin

@@ -0,0 +1,57 @@
+//+private
+//+build darwin, freebsd, openbsd
+package spall
+
+// Only for types.
+import "core:os"
+
+when ODIN_OS == .Darwin {
+	foreign import libc "system:System.framework"
+} else {
+	foreign import libc "system:c"
+}
+
+timespec :: struct {
+	tv_sec:  i64, // seconds
+	tv_nsec: i64, // nanoseconds
+}
+
+foreign libc {
+	__error :: proc() -> ^i32 ---
+	@(link_name="write")         _unix_write         :: proc(handle: os.Handle, buffer: rawptr, count: uint) -> int ---
+	@(link_name="clock_gettime") _unix_clock_gettime :: proc(clock_id: u64, timespec: ^timespec) -> i32 ---
+}
+
+@(no_instrumentation)
+get_last_error :: proc "contextless" () -> int {
+	return int(__error()^)
+}
+
+MAX_RW :: 0x7fffffff
+
+@(no_instrumentation)
+_write :: proc "contextless" (fd: os.Handle, data: []byte) -> (n: int, err: os.Errno) #no_bounds_check /* bounds check would segfault instrumentation */ {
+	if len(data) == 0 {
+		return 0, os.ERROR_NONE
+	}
+
+	for n < len(data) {
+		chunk := data[n:][:min(len(data)-n, MAX_RW)] // continue from what has already been written
+		written := _unix_write(fd, raw_data(chunk), uint(len(chunk)))
+		if written < 0 {
+			return n, os.Errno(get_last_error())
+		}
+		n += written
+	}
+
+	return n, os.ERROR_NONE
+}
+
+CLOCK_MONOTONIC_RAW :: 4 // NOTE(tetra): "RAW" means: Not adjusted by NTP.
+
+@(no_instrumentation)
+_tick_now :: proc "contextless" () -> (ns: i64) {
+	t: timespec
+	_unix_clock_gettime(CLOCK_MONOTONIC_RAW, &t)
+	return t.tv_sec*1e9 + t.tv_nsec
+}

+ 54 - 0
core/prof/spall/spall_windows.odin

@@ -0,0 +1,54 @@
+//+private
+package spall
+
+// Only for types.
+import "core:os"
+
+// Package is `//+no-instrumentation`, safe to use.
+import win32 "core:sys/windows"
+
+MAX_RW :: 1<<30
+
+@(no_instrumentation)
+_write :: proc "contextless" (fd: os.Handle, data: []byte) -> (int, os.Errno) #no_bounds_check /* bounds check would segfault instrumentation */ {
+	if len(data) == 0 {
+		return 0, os.ERROR_NONE
+	}
+
+	single_write_length: win32.DWORD
+	total_write: i64
+	length := i64(len(data))
+
+	for total_write < length {
+		remaining := length - total_write
+		to_write := win32.DWORD(min(remaining, MAX_RW)) // compare in i64 to avoid i32 truncation
+
+		e := win32.WriteFile(win32.HANDLE(fd), &data[total_write], to_write, &single_write_length, nil)
+		if single_write_length <= 0 || !e {
+			err := os.Errno(win32.GetLastError())
+			return int(total_write), err
+		}
+		total_write += i64(single_write_length)
+	}
+	return int(total_write), os.ERROR_NONE
+}
+
+@(no_instrumentation)
+_tick_now :: proc "contextless" () -> (ns: i64) {
+	@(no_instrumentation)
+	mul_div_u64 :: #force_inline proc "contextless" (val, num, den: i64) -> i64 {
+		q := val / den
+		r := val % den
+		return q * num + r * num / den
+	}
+
+	@thread_local qpc_frequency: win32.LARGE_INTEGER
+
+	if qpc_frequency == 0 {
+		win32.QueryPerformanceFrequency(&qpc_frequency)
+	}
+	now: win32.LARGE_INTEGER
+	win32.QueryPerformanceCounter(&now)
+
+	return mul_div_u64(i64(now), 1e9, i64(qpc_frequency))
+}

+ 171 - 0
core/relative/relative.odin

@@ -0,0 +1,171 @@
+package relative_types
+
+import "base:intrinsics"
+
+Pointer :: struct($Type: typeid, $Backing: typeid)
+	where
+		intrinsics.type_is_pointer(Type) || intrinsics.type_is_multi_pointer(Type),
+		intrinsics.type_is_integer(Backing) {
+	offset: Backing,
+}
+
+Slice :: struct($Type: typeid, $Backing: typeid)
+	where
+		intrinsics.type_is_slice(Type),
+		intrinsics.type_is_integer(Backing) {
+	offset: Backing,
+	len:    Backing,
+}
+
+
+
+@(require_results)
+pointer_get :: proc "contextless" (p: ^$P/Pointer($T, $B)) -> T {
+	if p.offset == 0 {
+		return nil
+	}
+	ptr := ([^]byte)(p)[p.offset:]
+	return (T)(ptr)
+}
+
+pointer_set :: proc "contextless" (p: ^$P/Pointer($T, $B), ptr: T) {
+	if ptr == nil {
+		p.offset = 0
+	} else {
+		p.offset = B(int(uintptr(ptr)) - int(uintptr(p)))
+	}
+}
+
+@(require_results)
+slice_get :: proc "contextless" (p: ^$S/Slice($T/[]$E, $B)) -> (slice: T) {
+	if p.offset == 0 {
+		when size_of(E) == 0 {
+			slice = T(([^]E)(nil)[:p.len])
+		}
+	} else {
+		ptr := ([^]E)(([^]byte)(p)[p.offset:])
+		slice = T(ptr[:p.len])
+	}
+	return
+}
+
+slice_set :: proc "contextless" (p: ^$S/Slice($T, $B), slice: T) {
+	if slice == nil {
+		p.offset, p.len = 0, 0
+	} else {
+		ptr := raw_data(slice)
+		p.offset = B(int(uintptr(ptr)) - int(uintptr(p)))
+		p.len    = B(len(slice))
+	}
+}
+
+get :: proc{
+	pointer_get,
+	slice_get,
+}
+
+set :: proc{
+	pointer_set,
+	slice_set,
+}
+
+
+
+Set_Safe_Error :: enum {
+	None,
+	Memory_Too_Far_Apart,
+	Length_Out_Of_Bounds,
+}
+
+
+@(require_results)
+pointer_set_safe :: proc "contextless" (p: ^$P/Pointer($T, $B), ptr: T) -> Set_Safe_Error {
+	if ptr == nil {
+		p.offset = 0
+	} else {
+		when intrinsics.type_is_unsigned(B) {
+			diff := uint(uintptr(ptr) - uintptr(p))
+			when size_of(B) < size_of(uint) {
+				if diff > uint(max(B)) {
+					return .Memory_Too_Far_Apart
+				}
+			} else {
+				if B(diff) > max(B) {
+					return .Memory_Too_Far_Apart
+				}
+			}
+		} else {
+			diff := int(uintptr(ptr)) - int(uintptr(p))
+			when size_of(B) < size_of(int) {
+				if diff > int(max(B)) {
+					return .Memory_Too_Far_Apart
+				}
+			} else {
+				if B(diff) > max(B) {
+					return .Memory_Too_Far_Apart
+				}
+			}
+		}
+		p.offset = B(diff)
+	}
+	return .None
+}
+
+@(require_results)
+slice_set_safe :: proc "contextless" (p: ^$S/Slice($T, $B), slice: T) -> Set_Safe_Error {
+	if slice == nil {
+		p.offset, p.len = 0, 0
+	} else {
+		ptr := raw_data(slice)
+		when intrinsics.type_is_unsigned(B) {
+			diff := uint(uintptr(ptr) - uintptr(p))
+			when size_of(B) < size_of(uint) {
+				if diff > uint(max(B)) {
+					return .Memory_Too_Far_Apart
+				}
+
+				if uint(len(slice)) > uint(max(B)) {
+					return .Length_Out_Of_Bounds
+				}
+			} else {
+				if B(diff) > max(B) {
+					return .Memory_Too_Far_Apart
+				}
+				if B(len(slice)) > max(B) {
+					return .Length_Out_Of_Bounds
+				}
+			}
+			p.offset = B(diff)
+			p.len = B(len(slice))
+		} else {
+			diff := int(uintptr(ptr)) - int(uintptr(p))
+			when size_of(B) < size_of(int) {
+				if diff > int(max(B)) {
+					return .Memory_Too_Far_Apart
+				}
+				if len(slice) > int(max(B)) || len(slice) < int(min(B)) {
+					return .Length_Out_Of_Bounds
+				}
+			} else {
+				if B(diff) > max(B) {
+					return .Memory_Too_Far_Apart
+				}
+				if B(len(slice)) > max(B) {
+					return .Length_Out_Of_Bounds
+				}
+				if B(len(slice)) > max(B) || B(len(slice)) < min(B) {
+					return .Length_Out_Of_Bounds
+				}
+			}
+		}
+		p.offset = B(diff)
+		p.len = B(len(slice))
+	}
+	return .None
+}
+
+
+set_safe :: proc{
+	pointer_set_safe,
+	slice_set_safe,
+}

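Both types store offsets relative to the address of the field itself, which is what makes a block containing both the pointer and its target relocatable as a whole. A minimal usage sketch (the node type is illustrative; note the package itself is named relative_types, so an import alias is used here):

	import rel "core:relative"

	Node :: struct {
		value: int,
		next:  rel.Pointer(^Node, i32), // 4-byte self-relative pointer
	}

	main :: proc() {
		a, b: Node
		rel.set(&a.next, &b)  // stores the offset of &b relative to &a.next
		p := rel.get(&a.next) // rebuilds the ^Node from the stored offset
		assert(p == &b)
	}
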
+ 1 - 0
core/sys/linux/helpers.odin

@@ -1,4 +1,5 @@
 //+build linux
+//+no-instrumentation
 package linux

 import "base:intrinsics"

+ 6 - 1
core/sys/linux/sys.odin

@@ -1,3 +1,4 @@
+//+no-instrumentation
 package linux

 import "base:intrinsics"
@@ -2394,7 +2395,11 @@ timer_delete :: proc "contextless" (timer: Timer) -> (Errno) {

 // TODO(flysand): clock_settime

-// TODO(flysand): clock_gettime
+clock_gettime :: proc "contextless" (clock: Clock_Id) -> (ts: Time_Spec, err: Errno) {
+	ret := syscall(SYS_clock_gettime, clock, &ts)
+	err = Errno(-ret)
+	return
+}

 // TODO(flysand): clock_getres


+ 1 - 0
core/sys/windows/kernel32.odin

@@ -130,6 +130,7 @@ foreign kernel32 {
 	ResumeThread :: proc(thread: HANDLE) -> DWORD ---
 	GetThreadPriority :: proc(thread: HANDLE) -> c_int ---
 	SetThreadPriority :: proc(thread: HANDLE, priority: c_int) -> BOOL ---
+	SetThreadDescription :: proc(hThread: HANDLE, lpThreadDescription: PCWSTR) -> HRESULT ---
 	GetExitCodeThread :: proc(thread: HANDLE, exit_code: ^DWORD) -> BOOL ---
 	TerminateThread :: proc(thread: HANDLE, exit_code: DWORD) -> BOOL ---
 	SuspendThread :: proc(hThread: HANDLE) -> DWORD ---

+ 1 - 0
core/sys/windows/user32.odin

@@ -53,6 +53,7 @@ foreign user32 {
 	DispatchMessageW :: proc(lpMsg: ^MSG) -> LRESULT ---

 	WaitMessage :: proc() -> BOOL ---
+	MsgWaitForMultipleObjects :: proc(nCount: DWORD, pHandles: ^HANDLE, fWaitAll: bool, dwMilliseconds: DWORD, dwWakeMask: DWORD) -> DWORD ---

 	PeekMessageA :: proc(lpMsg: ^MSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, wRemoveMsg: UINT) -> BOOL ---
 	PeekMessageW :: proc(lpMsg: ^MSG, hWnd: HWND, wMsgFilterMin: UINT, wMsgFilterMax: UINT, wRemoveMsg: UINT) -> BOOL ---

+ 4 - 0
examples/all/all_main.odin

@@ -27,6 +27,8 @@ import blake2b          "core:crypto/blake2b"
 import blake2s          "core:crypto/blake2s"
 import chacha20         "core:crypto/chacha20"
 import chacha20poly1305 "core:crypto/chacha20poly1305"
+import crypto_hash      "core:crypto/hash"
+import hmac             "core:crypto/hmac"
 import keccak           "core:crypto/legacy/keccak"
 import md5              "core:crypto/legacy/md5"
 import sha1             "core:crypto/legacy/sha1"
@@ -137,10 +139,12 @@ _ :: lru
 _ :: list
 _ :: topological_sort
 _ :: crypto
+_ :: crypto_hash
 _ :: blake2b
 _ :: blake2s
 _ :: chacha20
 _ :: chacha20poly1305
+_ :: hmac
 _ :: keccak
 _ :: md5
 _ :: poly1305

+ 19 - 13
src/build_settings.cpp

@@ -876,7 +876,7 @@ gb_internal String internal_odin_root_dir(void) {
 
 #include <mach-o/dyld.h>
 
-gb_internal String path_to_fullpath(gbAllocator a, String s);
+gb_internal String path_to_fullpath(gbAllocator a, String s, bool *ok_);
 
 gb_internal String internal_odin_root_dir(void) {
 	String path = global_module_path;
@@ -907,7 +907,7 @@ gb_internal String internal_odin_root_dir(void) {
 	text = gb_alloc_array(permanent_allocator(), u8, len + 1);
 	gb_memmove(text, &path_buf[0], len);
 
-	path = path_to_fullpath(heap_allocator(), make_string(text, len));
+	path = path_to_fullpath(heap_allocator(), make_string(text, len), nullptr);
 
 	for (i = path.len-1; i >= 0; i--) {
 		u8 c = path[i];
@@ -930,7 +930,7 @@ gb_internal String internal_odin_root_dir(void) {
 // NOTE: Linux / Unix is unfinished and not tested very well.
 #include <sys/stat.h>
 
-gb_internal String path_to_fullpath(gbAllocator a, String s);
+gb_internal String path_to_fullpath(gbAllocator a, String s, bool *ok_);
 
 gb_internal String internal_odin_root_dir(void) {
 	String path = global_module_path;
@@ -1072,7 +1072,7 @@ gb_internal String internal_odin_root_dir(void) {
 
 	gb_memmove(text, &path_buf[0], len);
 
-	path = path_to_fullpath(heap_allocator(), make_string(text, len));
+	path = path_to_fullpath(heap_allocator(), make_string(text, len), nullptr);
 	for (i = path.len-1; i >= 0; i--) {
 		u8 c = path[i];
 		if (c == '/' || c == '\\') {
@@ -1091,7 +1091,7 @@ gb_internal String internal_odin_root_dir(void) {
 gb_global BlockingMutex fullpath_mutex;
 
 #if defined(GB_SYSTEM_WINDOWS)
-gb_internal String path_to_fullpath(gbAllocator a, String s) {
+gb_internal String path_to_fullpath(gbAllocator a, String s, bool *ok_) {
 	String result = {};
 
 	String16 string16 = string_to_string16(heap_allocator(), s);
@@ -1117,19 +1117,25 @@ gb_internal String path_to_fullpath(gbAllocator a, String s) {
 				result.text[i] = '/';
 			}
 		}
+		if (ok_) *ok_ = true;
 	} else {
+		if (ok_) *ok_ = false;
 		mutex_unlock(&fullpath_mutex);
 	}
 
 	return result;
 }
 #elif defined(GB_SYSTEM_OSX) || defined(GB_SYSTEM_UNIX)
-gb_internal String path_to_fullpath(gbAllocator a, String s) {
+gb_internal String path_to_fullpath(gbAllocator a, String s, bool *ok_) {
 	char *p;
 	mutex_lock(&fullpath_mutex);
 	p = realpath(cast(char *)s.text, 0);
 	mutex_unlock(&fullpath_mutex);
-	if(p == nullptr) return String{};
+	if(p == nullptr) {
+		if (ok_) *ok_ = false;
+		return String{};
+	}
+	if (ok_) *ok_ = true;
 	return make_string_c(p);
 }
 #else
@@ -1137,7 +1143,7 @@ gb_internal String path_to_fullpath(gbAllocator a, String s) {
 #endif
 
 
-gb_internal String get_fullpath_relative(gbAllocator a, String base_dir, String path) {
+gb_internal String get_fullpath_relative(gbAllocator a, String base_dir, String path, bool *ok_) {
 	u8 *str = gb_alloc_array(heap_allocator(), u8, base_dir.len+1+path.len+1);
 	defer (gb_free(heap_allocator(), str));
 
@@ -1159,11 +1165,11 @@ gb_internal String get_fullpath_relative(gbAllocator a, String base_dir, String
 
 	String res = make_string(str, i);
 	res = string_trim_whitespace(res);
-	return path_to_fullpath(a, res);
+	return path_to_fullpath(a, res, ok_);
 }
 
 
-gb_internal String get_fullpath_base_collection(gbAllocator a, String path) {
+gb_internal String get_fullpath_base_collection(gbAllocator a, String path, bool *ok_) {
 	String module_dir = odin_root_dir();
 
 	String base = str_lit("base/");
@@ -1180,10 +1186,10 @@ gb_internal String get_fullpath_base_collection(gbAllocator a, String path) {
 
 	String res = make_string(str, i);
 	res = string_trim_whitespace(res);
-	return path_to_fullpath(a, res);
+	return path_to_fullpath(a, res, ok_);
 }
 
-gb_internal String get_fullpath_core_collection(gbAllocator a, String path) {
+gb_internal String get_fullpath_core_collection(gbAllocator a, String path, bool *ok_) {
 	String module_dir = odin_root_dir();
 
 	String core = str_lit("core/");
@@ -1200,7 +1206,7 @@ gb_internal String get_fullpath_core_collection(gbAllocator a, String path) {
 
 	String res = make_string(str, i);
 	res = string_trim_whitespace(res);
-	return path_to_fullpath(a, res);
+	return path_to_fullpath(a, res, ok_);
 }
 
 gb_internal bool show_error_line(void) {

+ 136 - 53
src/check_builtin.cpp

@@ -1264,6 +1264,139 @@ gb_internal LoadDirectiveResult check_load_directive(CheckerContext *c, Operand
 
 }
 
+gb_internal int file_cache_sort_cmp(void const *x, void const *y) {
+	LoadFileCache const *a = *(LoadFileCache const **)(x);
+	LoadFileCache const *b = *(LoadFileCache const **)(y);
+	return string_compare(a->path, b->path);
+}
+
+gb_internal LoadDirectiveResult check_load_directory_directive(CheckerContext *c, Operand *operand, Ast *call, Type *type_hint, bool err_on_not_found) {
+	ast_node(ce, CallExpr, call);
+	ast_node(bd, BasicDirective, ce->proc);
+	String name = bd->name.string;
+	GB_ASSERT(name == "load_directory");
+
+	if (ce->args.count != 1) {
+		error(ce->args[0], "'#%.*s' expects 1 argument, got %td", LIT(name), ce->args.count);
+		return LoadDirective_Error;
+	}
+
+	Ast *arg = ce->args[0];
+	Operand o = {};
+	check_expr(c, &o, arg);
+	if (o.mode != Addressing_Constant) {
+		error(arg, "'#%.*s' expected a constant string argument", LIT(name));
+		return LoadDirective_Error;
+	}
+
+	if (!is_type_string(o.type)) {
+		gbString str = type_to_string(o.type);
+		error(arg, "'#%.*s' expected a constant string, got %s", LIT(name), str);
+		gb_string_free(str);
+		return LoadDirective_Error;
+	}
+
+	GB_ASSERT(o.value.kind == ExactValue_String);
+
+	init_core_load_directory_file(c->checker);
+
+	operand->type = t_load_directory_file_slice;
+	operand->mode = Addressing_Value;
+
+
+	String original_string = o.value.value_string;
+	String path;
+	if (gb_path_is_absolute((char*)original_string.text)) {
+		path = original_string;
+	} else {
+		String base_dir = dir_from_path(get_file_path_string(call->file_id));
+
+		BlockingMutex *ignore_mutex = nullptr;
+		bool ok = determine_path_from_string(ignore_mutex, call, base_dir, original_string, &path);
+		gb_unused(ok);
+	}
+	MUTEX_GUARD(&c->info->load_directory_mutex);
+
+
+	gbFileError file_error = gbFileError_None;
+
+	Array<LoadFileCache *> file_caches = {};
+
+	LoadDirectoryCache **cache_ptr = string_map_get(&c->info->load_directory_cache, path);
+	LoadDirectoryCache *cache = cache_ptr ? *cache_ptr : nullptr;
+	if (cache) {
+		file_error = cache->file_error;
+	}
+	defer ({
+		if (cache == nullptr) {
+			LoadDirectoryCache *new_cache = gb_alloc_item(permanent_allocator(), LoadDirectoryCache);
+			new_cache->path = path;
+			new_cache->files = file_caches;
+			new_cache->file_error = file_error;
+			string_map_set(&c->info->load_directory_cache, path, new_cache);
+
+			map_set(&c->info->load_directory_map, call, new_cache);
+		} else {
+			cache->file_error = file_error;
+		}
+	});
+
+
+	LoadDirectiveResult result = LoadDirective_Success;
+
+
+	if (cache == nullptr)  {
+		Array<FileInfo> list = {};
+		ReadDirectoryError rd_err = read_directory(path, &list);
+		defer (array_free(&list));
+
+		if (list.count == 1) {
+			GB_ASSERT(path != list[0].fullpath);
+		}
+
+
+		switch (rd_err) {
+		case ReadDirectory_InvalidPath:
+			error(call, "%.*s error - invalid path: %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_NotFound;
+		case ReadDirectory_NotExists:
+			error(call, "%.*s error - path does not exist: %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_NotFound;
+		case ReadDirectory_Permission:
+			error(call, "%.*s error - unknown error whilst reading path, %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_Error;
+		case ReadDirectory_NotDir:
+			error(call, "%.*s error - expected a directory, got a file: %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_Error;
+		case ReadDirectory_Empty:
+			error(call, "%.*s error - empty directory: %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_NotFound;
+		case ReadDirectory_Unknown:
+			error(call, "%.*s error - unknown error whilst reading path %.*s", LIT(name), LIT(original_string));
+			return LoadDirective_Error;
+		}
+
+		isize files_to_reserve = list.count+1; // always reserve 1
+
+		file_caches = array_make<LoadFileCache *>(heap_allocator(), 0, files_to_reserve);
+
+		for (FileInfo fi : list) {
+			LoadFileCache *cache = nullptr;
+			if (cache_load_file_directive(c, call, fi.fullpath, err_on_not_found, &cache)) {
+				array_add(&file_caches, cache);
+			} else {
+				result = LoadDirective_Error;
+			}
+		}
+
+		gb_sort_array(file_caches.data, file_caches.count, file_cache_sort_cmp);
+
+	}
+
+	return result;
+}
+
+
 
 gb_internal bool check_builtin_procedure_directive(CheckerContext *c, Operand *operand, Ast *call, Type *type_hint) {
 	ast_node(ce, CallExpr, call);
@@ -1291,6 +1424,8 @@ gb_internal bool check_builtin_procedure_directive(CheckerContext *c, Operand *o
 		operand->mode = Addressing_Value;
 	} else if (name == "load") {
 		return check_load_directive(c, operand, call, type_hint, true) == LoadDirective_Success;
+	} else if (name == "load_directory") {
+		return check_load_directory_directive(c, operand, call, type_hint, true) == LoadDirective_Success;
 	} else if (name == "load_hash") {
 		if (ce->args.count != 2) {
 			if (ce->args.count == 0) {
@@ -1408,58 +1543,6 @@ gb_internal bool check_builtin_procedure_directive(CheckerContext *c, Operand *o
 			return true;
 		}
 		return false;
-	} else if (name == "load_or") {
-		error(call, "'#load_or' has now been removed in favour of '#load(path) or_else default'");
-
-		if (ce->args.count != 2) {
-			if (ce->args.count == 0) {
-				error(ce->close, "'#load_or' expects 2 arguments, got 0");
-			} else {
-				error(ce->args[0], "'#load_or' expects 2 arguments, got %td", ce->args.count);
-			}
-			return false;
-		}
-
-		Ast *arg = ce->args[0];
-		Operand o = {};
-		check_expr(c, &o, arg);
-		if (o.mode != Addressing_Constant) {
-			error(arg, "'#load_or' expected a constant string argument");
-			return false;
-		}
-
-		if (!is_type_string(o.type)) {
-			gbString str = type_to_string(o.type);
-			error(arg, "'#load_or' expected a constant string, got %s", str);
-			gb_string_free(str);
-			return false;
-		}
-
-		Ast *default_arg = ce->args[1];
-		Operand default_op = {};
-		check_expr_with_type_hint(c, &default_op, default_arg, t_u8_slice);
-		if (default_op.mode != Addressing_Constant) {
-			error(arg, "'#load_or' expected a constant '[]byte' argument");
-			return false;
-		}
-
-		if (!are_types_identical(base_type(default_op.type), t_u8_slice)) {
-			gbString str = type_to_string(default_op.type);
-			error(arg, "'#load_or' expected a constant '[]byte', got %s", str);
-			gb_string_free(str);
-			return false;
-		}
-		GB_ASSERT(o.value.kind == ExactValue_String);
-		String original_string = o.value.value_string;
-
-		operand->type = t_u8_slice;
-		operand->mode = Addressing_Constant;
-		LoadFileCache *cache = nullptr;
-		if (cache_load_file_directive(c, call, original_string, false, &cache)) {
-			operand->value = exact_value_string(cache->data);
-		} else {
-			operand->value = default_op.value;
-		}
 	} else if (name == "assert") {
 		if (ce->args.count != 1 && ce->args.count != 2) {
 			error(call, "'#assert' expects either 1 or 2 arguments, got %td", ce->args.count);
@@ -5686,7 +5769,7 @@ gb_internal bool check_builtin_procedure(CheckerContext *c, Operand *operand, As
 				return false;
 			}
 
-			operand->value = exact_value_bool(is_type_subtype_of(op_src.type, op_dst.type));
+			operand->value = exact_value_bool(is_type_subtype_of_and_allow_polymorphic(op_src.type, op_dst.type));
 			operand->mode = Addressing_Constant;
 			operand->type = t_untyped_bool;
 		} break;

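Together with the backend support further below, the new directive embeds a whole directory at compile time. A hedged sketch of user-side usage (the "assets" path is hypothetical, and the element field names are inferred from the Load_Directory_File handling above; the exact struct lives in base:runtime):

package load_directory_example

import "core:fmt"

main :: proc() {
	// Every file in the directory is embedded at compile time,
	// sorted by path as in file_cache_sort_cmp above.
	files := #load_directory("assets") // hypothetical directory
	for f in files {
		fmt.println(f.name, len(f.data)) // assumed field names
	}
}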
+ 22 - 17
src/check_expr.cpp

@@ -3118,19 +3118,20 @@ gb_internal void check_cast(CheckerContext *c, Operand *x, Type *type) {
 		Type *src = core_type(x->type);
 		Type *dst = core_type(type);
 		if (src != dst) {
+			bool const REQUIRE = true;
 			if (is_type_integer_128bit(src) && is_type_float(dst)) {
-				add_package_dependency(c, "runtime", "floattidf_unsigned");
-				add_package_dependency(c, "runtime", "floattidf");
+				add_package_dependency(c, "runtime", "floattidf_unsigned", REQUIRE);
+				add_package_dependency(c, "runtime", "floattidf",          REQUIRE);
 			} else if (is_type_integer_128bit(dst) && is_type_float(src)) {
-				add_package_dependency(c, "runtime", "fixunsdfti");
-				add_package_dependency(c, "runtime", "fixunsdfdi");
+				add_package_dependency(c, "runtime", "fixunsdfti",         REQUIRE);
+				add_package_dependency(c, "runtime", "fixunsdfdi",         REQUIRE);
 			} else if (src == t_f16 && is_type_float(dst)) {
-				add_package_dependency(c, "runtime", "gnu_h2f_ieee");
-				add_package_dependency(c, "runtime", "extendhfsf2");
+				add_package_dependency(c, "runtime", "gnu_h2f_ieee",       REQUIRE);
+				add_package_dependency(c, "runtime", "extendhfsf2",        REQUIRE);
 			} else if (is_type_float(dst) && dst == t_f16) {
-				add_package_dependency(c, "runtime", "truncsfhf2");
-				add_package_dependency(c, "runtime", "truncdfhf2");
-				add_package_dependency(c, "runtime", "gnu_f2h_ieee");
+				add_package_dependency(c, "runtime", "truncsfhf2",         REQUIRE);
+				add_package_dependency(c, "runtime", "truncdfhf2",         REQUIRE);
+				add_package_dependency(c, "runtime", "gnu_f2h_ieee",       REQUIRE);
 			}
 		}
 	}
@@ -3753,12 +3754,15 @@ gb_internal void check_binary_expr(CheckerContext *c, Operand *x, Ast *node, Typ
 		x->mode = Addressing_Invalid;
 		return;
 	}
+
+	bool REQUIRE = true;
+
 	Type *bt = base_type(x->type);
 	if (op.kind == Token_Mod    || op.kind == Token_ModEq ||
 	    op.kind == Token_ModMod || op.kind == Token_ModModEq) {
 		if (bt->kind == Type_Basic) switch (bt->Basic.kind) {
-		case Basic_u128: add_package_dependency(c, "runtime", "umodti3"); break;
-		case Basic_i128: add_package_dependency(c, "runtime", "modti3");  break;
+		case Basic_u128: add_package_dependency(c, "runtime", "umodti3", REQUIRE); break;
+		case Basic_i128: add_package_dependency(c, "runtime", "modti3",  REQUIRE); break;
 		}
 	} else if (op.kind == Token_Quo || op.kind == Token_QuoEq) {
 		if (bt->kind == Type_Basic) switch (bt->Basic.kind) {
@@ -3769,8 +3773,8 @@ gb_internal void check_binary_expr(CheckerContext *c, Operand *x, Ast *node, Typ
 		case Basic_quaternion128: add_package_dependency(c, "runtime", "quo_quaternion128"); break;
 		case Basic_quaternion256: add_package_dependency(c, "runtime", "quo_quaternion256"); break;
 
-		case Basic_u128: add_package_dependency(c, "runtime", "udivti3"); break;
-		case Basic_i128: add_package_dependency(c, "runtime", "divti3");  break;
+		case Basic_u128: add_package_dependency(c, "runtime", "udivti3", REQUIRE); break;
+		case Basic_i128: add_package_dependency(c, "runtime", "divti3",  REQUIRE); break;
 		}
 	} else if (op.kind == Token_Mul || op.kind == Token_MulEq) {
 		if (bt->kind == Type_Basic) switch (bt->Basic.kind) {
@@ -3782,7 +3786,7 @@ gb_internal void check_binary_expr(CheckerContext *c, Operand *x, Ast *node, Typ
 		case Basic_u128:
 		case Basic_i128:
 			if (is_arch_wasm()) {
-				add_package_dependency(c, "runtime", "__multi3");
+				add_package_dependency(c, "runtime", "__multi3", REQUIRE);
 			}
 			break;
 		}
@@ -3791,7 +3795,7 @@ gb_internal void check_binary_expr(CheckerContext *c, Operand *x, Ast *node, Typ
 		case Basic_u128:
 		case Basic_i128:
 			if (is_arch_wasm()) {
-				add_package_dependency(c, "runtime", "__ashlti3");
+				add_package_dependency(c, "runtime", "__ashlti3", REQUIRE);
 			}
 			break;
 		}
@@ -7103,8 +7107,8 @@ gb_internal ExprKind check_call_expr(CheckerContext *c, Operand *operand, Ast *c
 		    name == "defined" || 
 		    name == "config" || 
 		    name == "load" ||
-		    name == "load_hash" ||
-		    name == "load_or"
+		    name == "load_directory" ||
+		    name == "load_hash"
 		) {
 			operand->mode = Addressing_Builtin;
 			operand->builtin_id = BuiltinProc_DIRECTIVE;
@@ -7954,6 +7958,7 @@ gb_internal ExprKind check_basic_directive_expr(CheckerContext *c, Operand *o, A
 		    name == "config" ||
 		    name == "load" ||
 		    name == "load_hash" ||
+		    name == "load_directory" ||
 		    name == "load_or"
 		) {
 			error(node, "'#%.*s' must be used as a call", LIT(name));

+ 25 - 6
src/check_type.cpp

@@ -1,4 +1,6 @@
 gb_internal ParameterValue handle_parameter_value(CheckerContext *ctx, Type *in_type, Type **out_type_, Ast *expr, bool allow_caller_location);
+gb_internal Type *determine_type_from_polymorphic(CheckerContext *ctx, Type *poly_type, Operand const &operand);
+gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_params, bool *is_variadic_, isize *variadic_index_, bool *success_, isize *specialization_count_, Array<Operand> const *operands);
 
 gb_internal void populate_using_array_index(CheckerContext *ctx, Ast *node, AstField *field, Type *t, String name, i32 idx) {
 	t = base_type(t);
@@ -393,7 +395,6 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
                                                   bool *is_polymorphic_,
                                                   Ast *node, Array<Operand> *poly_operands) {
 	Type *polymorphic_params_type = nullptr;
-	bool can_check_fields = true;
 	GB_ASSERT(is_polymorphic_ != nullptr);
 
 	if (polymorphic_params == nullptr) {
@@ -403,6 +404,17 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 		return polymorphic_params_type;
 	}
 
+
+	// bool is_variadic = false;
+	// isize variadic_index = 0;
+	// bool success = false;
+	// isize specialization_count = 0;
+	// polymorphic_params_type = check_get_params(ctx, ctx->scope, polymorphic_params, &is_variadic, &variadic_index, &success, &specialization_count, poly_operands);
+	// if (success) {
+	// 	return nullptr;
+	// }
+
+	bool can_check_fields = true;
 	ast_node(field_list, FieldList, polymorphic_params);
 	Slice<Ast *> params = field_list->list;
 	if (params.count != 0) {
@@ -417,11 +429,13 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 
 		auto entities = array_make<Entity *>(permanent_allocator(), 0, variable_count);
 
+		i32 field_group_index = -1;
 		for_array(i, params) {
 			Ast *param = params[i];
 			if (param->kind != Ast_Field) {
 				continue;
 			}
+			field_group_index += 1;
 			ast_node(p, Field, param);
 			Ast *type_expr = p->type;
 			Ast *default_value = unparen_expr(p->default_value);
@@ -481,7 +495,7 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 				type = t_invalid;
 			}
 
-			if (is_type_polymorphic_type) {
+			if (is_type_polymorphic_type && !is_type_proc(type)) {
 				gbString str = type_to_string(type);
 				error(params[i], "Parameter types cannot be polymorphic, got %s", str);
 				gb_string_free(str);
@@ -523,13 +537,18 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 						e->TypeName.is_type_alias = true;
 						e->flags |= EntityFlag_PolyConst;
 					} else {
-						if (is_type_polymorphic(base_type(operand.type))) {
+						Type *t = operand.type;
+						if (is_type_proc(type)) {
+							t = determine_type_from_polymorphic(ctx, type, operand);
+						}
+						if (is_type_polymorphic(base_type(t))) {
 							*is_polymorphic_ = true;
 							can_check_fields = false;
 						}
 						if (e == nullptr) {
-							e = alloc_entity_constant(scope, token, operand.type, operand.value);
+							e = alloc_entity_const_param(scope, token, t, operand.value, is_type_polymorphic(t));
 							e->Constant.param_value = param_value;
+							e->Constant.field_group_index = field_group_index;
 						}
 					}
 				} else {
@@ -538,7 +557,8 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 						e->TypeName.is_type_alias = true;
 						e->flags |= EntityFlag_PolyConst;
 					} else {
-						e = alloc_entity_constant(scope, token, type, param_value.value);
+						e = alloc_entity_const_param(scope, token, type, param_value.value, is_type_polymorphic(type));
+						e->Constant.field_group_index = field_group_index;
 						e->Constant.param_value = param_value;
 					}
 				}
@@ -559,7 +579,6 @@ gb_internal Type *check_record_polymorphic_params(CheckerContext *ctx, Ast *poly
 	if (!*is_polymorphic_) {
 		*is_polymorphic_ = polymorphic_params != nullptr && poly_operands == nullptr;
 	}
-
 	return polymorphic_params_type;
 }
 

+ 26 - 8
src/checker.cpp

@@ -774,7 +774,7 @@ gb_internal void add_type_info_dependency(CheckerInfo *info, DeclInfo *d, Type *
 gb_internal AstPackage *get_runtime_package(CheckerInfo *info) {
 	String name = str_lit("runtime");
 	gbAllocator a = heap_allocator();
-	String path = get_fullpath_base_collection(a, name);
+	String path = get_fullpath_base_collection(a, name, nullptr);
 	defer (gb_free(a, path.text));
 	auto found = string_map_get(&info->packages, path);
 	if (found == nullptr) {
@@ -795,7 +795,7 @@ gb_internal AstPackage *get_core_package(CheckerInfo *info, String name) {
 	}
 
 	gbAllocator a = heap_allocator();
-	String path = get_fullpath_core_collection(a, name);
+	String path = get_fullpath_core_collection(a, name, nullptr);
 	defer (gb_free(a, path.text));
 	auto found = string_map_get(&info->packages, path);
 	if (found == nullptr) {
@@ -810,13 +810,16 @@ gb_internal AstPackage *get_core_package(CheckerInfo *info, String name) {
 	return *found;
 }
 
-gb_internal void add_package_dependency(CheckerContext *c, char const *package_name, char const *name) {
+gb_internal void add_package_dependency(CheckerContext *c, char const *package_name, char const *name, bool required=false) {
 	String n = make_string_c(name);
 	AstPackage *p = get_core_package(&c->checker->info, make_string_c(package_name));
 	Entity *e = scope_lookup(p->scope, n);
 	GB_ASSERT_MSG(e != nullptr, "%s", name);
 	GB_ASSERT(c->decl != nullptr);
 	e->flags |= EntityFlag_Used;
+	if (required) {
+		e->flags |= EntityFlag_Require;
+	}
 	add_dependency(c->info, c->decl, e);
 }
 
@@ -1254,6 +1257,9 @@ gb_internal void init_checker_info(CheckerInfo *i) {
 	mpsc_init(&i->required_global_variable_queue, a); // 1<<10);
 	mpsc_init(&i->required_foreign_imports_through_force_queue, a); // 1<<10);
 	mpsc_init(&i->intrinsics_entry_point_usage, a); // 1<<10); // just waste some memory here, even if it probably never used
+
+	string_map_init(&i->load_directory_cache);
+	map_init(&i->load_directory_map);
 }
 
 gb_internal void destroy_checker_info(CheckerInfo *i) {
@@ -1277,6 +1283,8 @@ gb_internal void destroy_checker_info(CheckerInfo *i) {
 
 	map_destroy(&i->objc_msgSend_types);
 	string_map_destroy(&i->load_file_cache);
+	string_map_destroy(&i->load_directory_cache);
+	map_destroy(&i->load_directory_map);
 }
 
 gb_internal CheckerContext make_checker_context(Checker *c) {
@@ -2567,7 +2575,7 @@ gb_internal void generate_minimum_dependency_set(Checker *c, Entity *start) {
 		str_lit("memmove"),
 	);
 
-	// FORCE_ADD_RUNTIME_ENTITIES(!build_context.tilde_backend,
+	FORCE_ADD_RUNTIME_ENTITIES(is_arch_wasm() && !build_context.tilde_backend,
 	// 	// Extended data type internal procedures
 	// 	str_lit("umodti3"),
 	// 	str_lit("udivti3"),
@@ -2584,10 +2592,10 @@ gb_internal void generate_minimum_dependency_set(Checker *c, Entity *start) {
 	// 	str_lit("gnu_f2h_ieee"),
 	// 	str_lit("extendhfsf2"),
 
-	// 	// WASM Specific
-	// 	str_lit("__ashlti3"),
-	// 	str_lit("__multi3"),
-	// );
+		// WASM Specific
+		str_lit("__ashlti3"),
+		str_lit("__multi3"),
+	);
 
 	FORCE_ADD_RUNTIME_ENTITIES(!build_context.no_rtti,
 		// Odin types
@@ -2955,6 +2963,16 @@ gb_internal void init_core_source_code_location(Checker *c) {
 	t_source_code_location_ptr = alloc_type_pointer(t_source_code_location);
 }
 
+gb_internal void init_core_load_directory_file(Checker *c) {
+	if (t_load_directory_file != nullptr) {
+		return;
+	}
+	t_load_directory_file = find_core_type(c, str_lit("Load_Directory_File"));
+	t_load_directory_file_ptr = alloc_type_pointer(t_load_directory_file);
+	t_load_directory_file_slice = alloc_type_slice(t_load_directory_file);
+}
+
+
 gb_internal void init_core_map_type(Checker *c) {
 	if (t_map_info != nullptr) {
 		return;

+ 18 - 0
src/checker.hpp

@@ -340,6 +340,19 @@ struct LoadFileCache {
 	StringMap<u64> hashes;
 };
 
+
+struct LoadDirectoryFile {
+	String file_name;
+	String data;
+};
+
+struct LoadDirectoryCache {
+	String                 path;
+	gbFileError            file_error;
+	Array<LoadFileCache *> files;
+};
+
+
 struct GenProcsData {
 	Array<Entity *> procs;
 	RwMutex         mutex;
@@ -416,6 +429,11 @@ struct CheckerInfo {
 	BlockingMutex instrumentation_mutex;
 	Entity *instrumentation_enter_entity;
 	Entity *instrumentation_exit_entity;
+
+
+	BlockingMutex                       load_directory_mutex;
+	StringMap<LoadDirectoryCache *>     load_directory_cache;
+	PtrMap<Ast *, LoadDirectoryCache *> load_directory_map; // Key: Ast_CallExpr *
 };
 
 struct CheckerContext {

+ 14 - 18
src/linker.cpp

@@ -482,37 +482,33 @@ gb_internal i32 linker_stage(LinkerData *gen) {
 			gbString platform_lib_str = gb_string_make(heap_allocator(), "");
 			defer (gb_string_free(platform_lib_str));
 			if (build_context.metrics.os == TargetOs_darwin) {
-				platform_lib_str = gb_string_appendc(platform_lib_str, "-Wl,-syslibroot /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -L/usr/local/lib");
+				platform_lib_str = gb_string_appendc(platform_lib_str, "-Wl,-syslibroot /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk -L/usr/local/lib ");
 
 				// Homebrew's default library path, checking if it exists to avoid linking warnings.
 				if (gb_file_exists("/opt/homebrew/lib")) {
-					platform_lib_str = gb_string_appendc(platform_lib_str, " -L/opt/homebrew/lib");
+					platform_lib_str = gb_string_appendc(platform_lib_str, "-L/opt/homebrew/lib ");
 				}
 
 				// MacPort's default library path, checking if it exists to avoid linking warnings.
 				if (gb_file_exists("/opt/local/lib")) {
-					platform_lib_str = gb_string_appendc(platform_lib_str, " -L/opt/local/lib");
+					platform_lib_str = gb_string_appendc(platform_lib_str, "-L/opt/local/lib ");
 				}
 
-				#if defined(GB_SYSTEM_OSX)
-				if(!build_context.no_crt) {
-					platform_lib_str = gb_string_appendc(platform_lib_str, " -lm ");
-					if(gen->needs_system_library_linked == 1) {
-						platform_lib_str = gb_string_appendc(platform_lib_str, " -lSystem ");
-					}
-				}
-				#endif
-			} else {
-				platform_lib_str = gb_string_appendc(platform_lib_str, "-lc -lm");
-			}
-
-			if (build_context.metrics.os == TargetOs_darwin) {
 				// This sets a requirement of Mountain Lion and up, but the compiler doesn't work without this limit.
 				if (build_context.minimum_os_version_string.len) {
-					link_settings = gb_string_append_fmt(link_settings, " -mmacosx-version-min=%.*s ", LIT(build_context.minimum_os_version_string));
+					link_settings = gb_string_append_fmt(link_settings, "-mmacosx-version-min=%.*s ", LIT(build_context.minimum_os_version_string));
 				}
 				// This points the linker to where the entry point is
-				link_settings = gb_string_appendc(link_settings, " -e _main ");
+				link_settings = gb_string_appendc(link_settings, "-e _main ");
+			}
+
+			if (!build_context.no_crt) {
+				platform_lib_str = gb_string_appendc(platform_lib_str, "-lm ");
+				if (build_context.metrics.os == TargetOs_darwin) {
+					platform_lib_str = gb_string_appendc(platform_lib_str, "-lSystem ");
+				} else {
+					platform_lib_str = gb_string_appendc(platform_lib_str, "-lc ");
+				}
 			}
 
 			gbString link_command_line = gb_string_make(heap_allocator(), "clang -Wno-unused-command-line-argument ");

+ 1 - 1
src/llvm_backend.cpp

@@ -334,7 +334,7 @@ gb_internal void lb_add_callsite_force_inline(lbProcedure *p, lbValue ret_value)
 
 gb_internal lbValue lb_hasher_proc_for_type(lbModule *m, Type *type) {
 	type = core_type(type);
-	GB_ASSERT_MSG(is_type_valid_for_keys(type), "%s", type_to_string(type));
+	GB_ASSERT_MSG(is_type_comparable(type), "%s", type_to_string(type));
 
 	Type *pt = alloc_type_pointer(type);
 

+ 2 - 0
src/llvm_backend.hpp

@@ -567,6 +567,8 @@ gb_internal LLVMTypeRef lb_type_internal_for_procedures_raw(lbModule *m, Type *t
 
 gb_internal lbValue lb_emit_source_code_location_as_global_ptr(lbProcedure *p, String const &procedure, TokenPos const &pos);
 
+gb_internal LLVMMetadataRef lb_debug_location_from_token_pos(lbProcedure *p, TokenPos pos);
+
 gb_internal LLVMTypeRef llvm_array_type(LLVMTypeRef ElementType, uint64_t ElementCount) {
 #if LB_USE_NEW_PASS_SYSTEM
 	return LLVMArrayType2(ElementType, ElementCount);

+ 1 - 1
src/llvm_backend_expr.cpp

@@ -3657,7 +3657,7 @@ gb_internal void lb_build_addr_compound_lit_populate(lbProcedure *p, Slice<Ast *
 		Ast *elem = elems[i];
 		if (elem->kind == Ast_FieldValue) {
 			ast_node(fv, FieldValue, elem);
-			if (lb_is_elem_const(fv->value, et)) {
+			if (bt->kind != Type_DynamicArray && lb_is_elem_const(fv->value, et)) {
 				continue;
 			}
 			if (is_ast_range(fv->field)) {

+ 15 - 4
src/llvm_backend_opt.cpp

@@ -380,9 +380,19 @@ gb_internal void lb_run_remove_dead_instruction_pass(lbProcedure *p) {
 	}
 }
 
-gb_internal LLVMValueRef lb_run_instrumentation_pass_insert_call(lbProcedure *p, Entity *entity, LLVMBuilderRef dummy_builder) {
+gb_internal LLVMValueRef lb_run_instrumentation_pass_insert_call(lbProcedure *p, Entity *entity, LLVMBuilderRef dummy_builder, bool is_enter) {
 	lbModule *m = p->module;
 
+	if (p->debug_info != nullptr) {
+		TokenPos pos = {};
+		if (is_enter) {
+			pos = ast_token(p->body).pos;
+		} else {
+			pos = ast_end_token(p->body).pos;
+		}
+		LLVMSetCurrentDebugLocation2(dummy_builder, lb_debug_location_from_token_pos(p, pos));
+	}
+
 	lbValue cc = lb_find_procedure_value_from_entity(m, entity);
 
 	LLVMValueRef args[3] = {};
@@ -430,7 +440,7 @@ gb_internal void lb_run_instrumentation_pass(lbProcedure *p) {
 
 	LLVMBasicBlockRef entry_bb = p->entry_block->block;
 	LLVMPositionBuilder(dummy_builder, entry_bb, LLVMGetFirstInstruction(entry_bb));
-	lb_run_instrumentation_pass_insert_call(p, enter, dummy_builder);
+	lb_run_instrumentation_pass_insert_call(p, enter, dummy_builder, true);
 	LLVMRemoveStringAttributeAtIndex(p->value, LLVMAttributeIndex_FunctionIndex, LLVM_V_NAME("instrument-function-entry"));
 
 	unsigned bb_count = LLVMCountBasicBlocks(p->value);
@@ -451,7 +461,7 @@ gb_internal void lb_run_instrumentation_pass(lbProcedure *p) {
 
 
 		LLVMPositionBuilderBefore(dummy_builder, terminator);
-		lb_run_instrumentation_pass_insert_call(p, exit, dummy_builder);
+		lb_run_instrumentation_pass_insert_call(p, exit, dummy_builder, false);
 	}
 
 	LLVMRemoveStringAttributeAtIndex(p->value, LLVMAttributeIndex_FunctionIndex, LLVM_V_NAME("instrument-function-exit"));
@@ -471,6 +481,8 @@ gb_internal void lb_run_function_pass_manager(LLVMPassManagerRef fpm, lbProcedur
 	// are not removed
 	lb_run_remove_dead_instruction_pass(p);
 
+	lb_run_instrumentation_pass(p);
+
 	switch (pass_manager_kind) {
 	case lbFunctionPassManager_none:
 	    return;
@@ -481,7 +493,6 @@ gb_internal void lb_run_function_pass_manager(LLVMPassManagerRef fpm, lbProcedur
 	    }
 	    break;
 	}
-	lb_run_instrumentation_pass(p);
 
 	LLVMRunFunctionPassManager(fpm, p->value);
 }

+ 52 - 15
src/llvm_backend_proc.cpp

@@ -1693,24 +1693,61 @@ gb_internal lbValue lb_build_builtin_proc(lbProcedure *p, Ast *expr, TypeAndValu
 	case BuiltinProc_DIRECTIVE: {
 		ast_node(bd, BasicDirective, ce->proc);
 		String name = bd->name.string;
-		GB_ASSERT(name == "location");
-		String procedure = p->entity->token.string;
-		TokenPos pos = ast_token(ce->proc).pos;
-		if (ce->args.count > 0) {
-			Ast *ident = unselector_expr(ce->args[0]);
-			GB_ASSERT(ident->kind == Ast_Ident);
-			Entity *e = entity_of_node(ident);
-			GB_ASSERT(e != nullptr);
-
-			if (e->parent_proc_decl != nullptr && e->parent_proc_decl->entity != nullptr) {
-				procedure = e->parent_proc_decl->entity->token.string;
-			} else {
-				procedure = str_lit("");
+		if (name == "location") {
+			String procedure = p->entity->token.string;
+			TokenPos pos = ast_token(ce->proc).pos;
+			if (ce->args.count > 0) {
+				Ast *ident = unselector_expr(ce->args[0]);
+				GB_ASSERT(ident->kind == Ast_Ident);
+				Entity *e = entity_of_node(ident);
+				GB_ASSERT(e != nullptr);
+
+				if (e->parent_proc_decl != nullptr && e->parent_proc_decl->entity != nullptr) {
+					procedure = e->parent_proc_decl->entity->token.string;
+				} else {
+					procedure = str_lit("");
+				}
+				pos = e->token.pos;
+
 			}
-			pos = e->token.pos;
+			return lb_emit_source_code_location_as_global(p, procedure, pos);
+		} else if (name == "load_directory") {
+			lbModule *m = p->module;
+			TEMPORARY_ALLOCATOR_GUARD();
+			LoadDirectoryCache *cache = map_must_get(&m->info->load_directory_map, expr);
+			isize count = cache->files.count;
+
+			LLVMValueRef *elements = gb_alloc_array(temporary_allocator(), LLVMValueRef, count);
+			for_array(i, cache->files) {
+				LoadFileCache *file = cache->files[i];
 
+				String file_name = filename_without_directory(file->path);
+
+				LLVMValueRef values[2] = {};
+				values[0] = lb_const_string(m, file_name).value;
+				values[1] = lb_const_string(m, file->data).value;
+				LLVMValueRef element = llvm_const_named_struct(m, t_load_directory_file, values, gb_count_of(values));
+				elements[i] = element;
+			}
+
+			LLVMValueRef backing_array = llvm_const_array(lb_type(m, t_load_directory_file), elements, count);
+
+			Type *array_type = alloc_type_array(t_load_directory_file, count);
+			lbAddr backing_array_addr = lb_add_global_generated(m, array_type, {backing_array, array_type}, nullptr);
+			lb_make_global_private_const(backing_array_addr);
+
+			LLVMValueRef backing_array_ptr = backing_array_addr.addr.value;
+			backing_array_ptr = LLVMConstPointerCast(backing_array_ptr, lb_type(m, t_load_directory_file_ptr));
+
+			LLVMValueRef const_slice = llvm_const_slice_internal(m, backing_array_ptr, LLVMConstInt(lb_type(m, t_int), count, false));
+
+			lbAddr addr = lb_add_global_generated(p->module, tv.type, {const_slice, t_load_directory_file_slice}, nullptr);
+			lb_make_global_private_const(addr);
+
+			return lb_addr_load(p, addr);
+		} else {
+			GB_PANIC("UNKNOWN DIRECTIVE: %.*s", LIT(name));
 		}
-		return lb_emit_source_code_location_as_global(p, procedure, pos);
 	}
 
 	case BuiltinProc_type_info_of: {

+ 25 - 11
src/main.cpp

@@ -807,9 +807,10 @@ gb_internal bool parse_build_flags(Array<String> args) {
 							}
 
 							gbAllocator a = heap_allocator();
-							String fullpath = path_to_fullpath(a, path);
-							if (!path_is_directory(fullpath)) {
-								gb_printf_err("Library collection '%.*s' path must be a directory, got '%.*s'\n", LIT(name), LIT(fullpath));
+							bool path_ok = false;
+							String fullpath = path_to_fullpath(a, path, &path_ok);
+							if (!path_ok || !path_is_directory(fullpath)) {
+								gb_printf_err("Library collection '%.*s' path must be a directory, got '%.*s'\n", LIT(name), LIT(path_ok ? fullpath : path));
 								gb_free(a, fullpath.text);
 								bad_flags = true;
 								break;
@@ -2395,9 +2396,18 @@ int main(int arg_count, char const **arg_ptr) {
 	TIME_SECTION("init default library collections");
 	array_init(&library_collections, heap_allocator());
 	// NOTE(bill): 'core' cannot be (re)defined by the user
-	add_library_collection(str_lit("base"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("base")));
-	add_library_collection(str_lit("core"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("core")));
-	add_library_collection(str_lit("vendor"), get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("vendor")));
+
+	auto const &add_collection = [](String const &name) {
+		bool ok = false;
+		add_library_collection(name, get_fullpath_relative(heap_allocator(), odin_root_dir(), name, &ok));
+		if (!ok) {
+			compiler_error("Cannot find the library collection '%.*s'. Is the ODIN_ROOT set up correctly?", LIT(name));
+		}
+	};
+
+	add_collection(str_lit("base"));
+	add_collection(str_lit("core"));
+	add_collection(str_lit("vendor"));
 
 	TIME_SECTION("init args");
 	map_init(&build_context.defined_values);
@@ -2425,14 +2435,18 @@ int main(int arg_count, char const **arg_ptr) {
 		Array<String> run_args = array_make<String>(heap_allocator(), 0, arg_count);
 		defer (array_free(&run_args));
 
+		isize run_args_start_idx = -1;
 		for_array(i, args) {
 			if (args[i] == "--") {
-				last_non_run_arg = i;
+				run_args_start_idx = i;
+				break;
 			}
-			if (i <= last_non_run_arg) {
-				continue;
+		}
+		if(run_args_start_idx != -1) {
+			last_non_run_arg = run_args_start_idx;
+			for(isize i = run_args_start_idx+1; i < args.count; ++i) {
+				array_add(&run_args, args[i]);
 			}
-			array_add(&run_args, args[i]);
 		}
 
 		args = array_slice(args, 0, last_non_run_arg);
@@ -2577,7 +2591,7 @@ int main(int arg_count, char const **arg_ptr) {
 	// NOTE(bill): add 'shared' directory if it is not already set
 	if (!find_library_collection_path(str_lit("shared"), nullptr)) {
 		add_library_collection(str_lit("shared"),
-			get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("shared")));
+			get_fullpath_relative(heap_allocator(), odin_root_dir(), str_lit("shared"), nullptr));
 	}
 
 	init_build_context(selected_target_metrics ? selected_target_metrics->metrics : nullptr, selected_subtarget);

+ 63 - 3
src/parser.cpp

@@ -230,6 +230,10 @@ gb_internal Ast *clone_ast(Ast *node, AstFile *f) {
 	case Ast_OrReturnExpr:
 		n->OrReturnExpr.expr = clone_ast(n->OrReturnExpr.expr, f);
 		break;
+	case Ast_OrBranchExpr:
+		n->OrBranchExpr.label = clone_ast(n->OrBranchExpr.label, f);
+		n->OrBranchExpr.expr  = clone_ast(n->OrBranchExpr.expr, f);
+		break;
 	case Ast_TypeAssertion:
 		n->TypeAssertion.expr = clone_ast(n->TypeAssertion.expr, f);
 		n->TypeAssertion.type = clone_ast(n->TypeAssertion.type, f);
@@ -2160,6 +2164,49 @@ gb_internal Array<Ast *> parse_union_variant_list(AstFile *f) {
 	return variants;
 }
 
+gb_internal void parser_check_polymorphic_record_parameters(AstFile *f, Ast *polymorphic_params) {
+	if (polymorphic_params == nullptr) {
+		return;
+	}
+	if (polymorphic_params->kind != Ast_FieldList) {
+		return;
+	}
+
+
+	enum {Unknown, Dollar, Bare} prefix = Unknown;
+	gb_unused(prefix);
+
+	for (Ast *field : polymorphic_params->FieldList.list) {
+		if (field == nullptr || field->kind != Ast_Field) {
+			continue;
+		}
+		for (Ast *name : field->Field.names) {
+			if (name == nullptr) {
+				continue;
+			}
+			bool error = false;
+
+			if (name->kind == Ast_Ident) {
+				switch (prefix) {
+				case Unknown: prefix = Bare; break;
+				case Dollar:  error = true;  break;
+				case Bare:                   break;
+				}
+			} else if (name->kind == Ast_PolyType) {
+				switch (prefix) {
+				case Unknown: prefix = Dollar; break;
+				case Dollar:                   break;
+				case Bare:    error = true;    break;
+				}
+			}
+			if (error) {
+				syntax_error(name, "Mixture of polymorphic $ names and normal identifiers are not allowed within record parameters");
+			}
+		}
+	}
+}
+
+
 gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
 	Ast *operand = nullptr; // Operand
 	switch (f->curr_token.kind) {
@@ -2606,6 +2653,8 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
 			decls = fields->FieldList.list;
 		}
 
+		parser_check_polymorphic_record_parameters(f, polymorphic_params);
+
 		return ast_struct_type(f, token, decls, name_count, polymorphic_params, is_packed, is_raw_union, no_copy, align, field_align, where_token, where_clauses);
 	} break;
 
@@ -2698,6 +2747,8 @@ gb_internal Ast *parse_operand(AstFile *f, bool lhs) {
 		auto variants = parse_union_variant_list(f);
 		Token close = expect_closing_brace_of_field_list(f);
 
+		parser_check_polymorphic_record_parameters(f, polymorphic_params);
+
 		return ast_union_type(f, token, variants, polymorphic_params, align, union_kind, where_token, where_clauses);
 	} break;
 
@@ -5515,7 +5566,8 @@ gb_internal bool determine_path_from_string(BlockingMutex *file_mutex, Ast *node
 	if (has_windows_drive) {
 		*path = file_str;
 	} else {
-		String fullpath = string_trim_whitespace(get_fullpath_relative(permanent_allocator(), base_dir, file_str));
+		bool ok = false;
+		String fullpath = string_trim_whitespace(get_fullpath_relative(permanent_allocator(), base_dir, file_str, &ok));
 		*path = fullpath;
 	}
 	return true;
@@ -6137,7 +6189,11 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
 	{ // Add these packages serially and then process them parallel
 		TokenPos init_pos = {};
 		{
-			String s = get_fullpath_base_collection(permanent_allocator(), str_lit("runtime"));
+			bool ok = false;
+			String s = get_fullpath_base_collection(permanent_allocator(), str_lit("runtime"), &ok);
+			if (!ok) {
+				compiler_error("Unable to find The 'base:runtime' package. Is the ODIN_ROOT set up correctly?");
+			}
 			try_add_import_path(p, s, s, init_pos, Package_Runtime);
 		}
 
@@ -6145,7 +6201,11 @@ gb_internal ParseFileError parse_packages(Parser *p, String init_filename) {
 		p->init_fullpath = init_fullpath;
 
 		if (build_context.command_kind == Command_test) {
-			String s = get_fullpath_core_collection(permanent_allocator(), str_lit("testing"));
+			bool ok = false;
+			String s = get_fullpath_core_collection(permanent_allocator(), str_lit("testing"), &ok);
+			if (!ok) {
+				compiler_error("Unable to find The 'core:testing' package. Is the ODIN_ROOT set up correctly?");
+			}
			try_add_import_path(p, s, s, init_pos, Package_Normal);
 		}
 

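The new parser_check_polymorphic_record_parameters pass above turns a mixture of $-prefixed and bare names in one record parameter list into a syntax error. A small Odin sketch of what still parses and what is now rejected:

package poly_params_example

// Accepted: every record parameter uses the $ prefix.
Vec :: struct($T: typeid, $N: int) {
	data: [N]T,
}

// Rejected with "Mixture of polymorphic $ names and normal identifiers
// are not allowed within record parameters":
//
// Bad :: struct($T: typeid, N: int) {
// 	data: [N]T,
// }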
+ 12 - 0
src/string.cpp

@@ -293,6 +293,18 @@ gb_internal String filename_from_path(String s) {
 	return make_string(nullptr, 0);
 }
 
+
+gb_internal String filename_without_directory(String s) {
+	isize j = 0;
+	for (j = s.len-1; j >= 0; j--) {
+		if (s[j] == '/' ||
+			s[j] == '\\') {
+			break;
+		}
+	}
+	return substring(s, gb_max(j+1, 0), s.len);
+}
+
 gb_internal String concatenate_strings(gbAllocator a, String const &x, String const &y) {
 	isize len = x.len+y.len;
 	u8 *data = gb_alloc_array(a, u8, len+1);

+ 21 - 2
src/types.cpp

@@ -679,6 +679,10 @@ gb_global Type *t_allocator_error                = nullptr;
 gb_global Type *t_source_code_location           = nullptr;
 gb_global Type *t_source_code_location           = nullptr;
 gb_global Type *t_source_code_location_ptr       = nullptr;
 gb_global Type *t_source_code_location_ptr       = nullptr;
 
 
+gb_global Type *t_load_directory_file            = nullptr;
+gb_global Type *t_load_directory_file_ptr        = nullptr;
+gb_global Type *t_load_directory_file_slice      = nullptr;
+
 gb_global Type *t_map_info                       = nullptr;
 gb_global Type *t_map_info                       = nullptr;
 gb_global Type *t_map_cell_info                  = nullptr;
 gb_global Type *t_map_cell_info                  = nullptr;
 gb_global Type *t_raw_map                        = nullptr;
 gb_global Type *t_raw_map                        = nullptr;
@@ -4093,7 +4097,7 @@ gb_internal i64 type_offset_of_from_selection(Type *type, Selection sel) {
 	return offset;
 }

-gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false) {
+gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false, bool allow_polymorphic=false) {
 	Type *prev_src = src;
 	src = type_deref(src);
 	if (!src_is_ptr) {
@@ -4105,11 +4109,19 @@ gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isi
 		return 0;
 	}

+	bool dst_is_polymorphic = is_type_polymorphic(dst);
+
 	for_array(i, src->Struct.fields) {
 		Entity *f = src->Struct.fields[i];
 		if (f->kind != Entity_Variable || (f->flags&EntityFlags_IsSubtype) == 0) {
 			continue;
 		}
+		if (allow_polymorphic && dst_is_polymorphic) {
+			Type *fb = base_type(type_deref(f->type));
+			if (fb->kind == Type_Struct && fb->Struct.polymorphic_parent == dst) {
+				return true;
+			}
+		}

 		if (are_types_identical(f->type, dst)) {
 			return level+1;
@@ -4119,7 +4131,7 @@ gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isi
 				return level+1;
 			}
 		}
-		isize nested_level = check_is_assignable_to_using_subtype(f->type, dst, level+1, src_is_ptr);
+		isize nested_level = check_is_assignable_to_using_subtype(f->type, dst, level+1, src_is_ptr, allow_polymorphic);
 		if (nested_level > 0) {
 			return nested_level;
 		}
@@ -4135,6 +4147,13 @@ gb_internal bool is_type_subtype_of(Type *src, Type *dst) {

 	return 0 < check_is_assignable_to_using_subtype(src, dst, 0, is_type_pointer(src));
 }
+gb_internal bool is_type_subtype_of_and_allow_polymorphic(Type *src, Type *dst) {
+	if (are_types_identical(src, dst)) {
+		return true;
+	}
+
+	return 0 < check_is_assignable_to_using_subtype(src, dst, 0, is_type_pointer(src), true);
+}


 gb_internal bool has_type_got_objc_class_attribute(Type *t) {
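With allow_polymorphic set, a 'using' subtype field whose struct type specializes a polymorphic struct now satisfies the subtype check against that polymorphic parent. Roughly the relationship the checker now accepts, sketched in Odin (illustrative names; an assumption about intent, not code from this commit):

package sketch

List :: struct($T: typeid) {
	data: []T,
}

Int_List :: struct {
	// base's type, List(int), has the polymorphic parent List($T), so with
	// allow_polymorphic, Int_List is treated as a subtype of that parent,
	// not just of the concrete List(int).
	using base: List(int),
	extra:      bool,
}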

+ 1 - 1
tests/core/Makefile

@@ -39,7 +39,7 @@ hash_test:
 	$(ODIN) run hash -o:speed -no-bounds-check -out:test_hash

 crypto_test:
-	$(ODIN) run crypto -o:speed -no-bounds-check -out:test_crypto_hash 
+	$(ODIN) run crypto -o:speed -no-bounds-check -out:test_crypto

 noise_test:
 	$(ODIN) run math/noise -out:test_noise

+ 2 - 2
tests/core/build.bat

@@ -29,9 +29,9 @@ echo ---
 %PATH_TO_ODIN% run odin %COMMON% -o:size -out:test_core_odin.exe || exit /b

 echo ---
-echo Running core:crypto hash tests
+echo Running core:crypto tests
 echo ---
-%PATH_TO_ODIN% run crypto %COMMON% -out:test_crypto_hash.exe || exit /b
+%PATH_TO_ODIN% run crypto %COMMON% -out:test_crypto.exe || exit /b

 echo ---
 echo Running core:encoding tests

+ 415 - 425
tests/core/crypto/test_core_crypto.odin

@@ -8,34 +8,31 @@ package test_core_crypto
 		zhibog, dotbmp:  Initial implementation.
 		Jeroen van Rijn: Test runner setup.

-	Tests for the hashing algorithms within the crypto library.
+	Tests for the various algorithms within the crypto library.
 	Where possible, the official test vectors are used to validate the implementation.
 */

-import "core:testing"
+import "core:encoding/hex"
 import "core:fmt"
-import "core:strings"
+import "core:mem"
+import "core:os"
+import "core:testing"
+
+import "core:crypto"
+import "core:crypto/chacha20"
+import "core:crypto/chacha20poly1305"
-import "core:crypto/sha2"
-import "core:crypto/sha3"
 import "core:crypto/shake"
-import "core:crypto/blake2b"
-import "core:crypto/blake2s"
-import "core:crypto/sm3"
-import "core:crypto/siphash"
-import "core:crypto/legacy/keccak"
-import "core:crypto/legacy/md5"
-import "core:crypto/legacy/sha1"
-import "core:os"
+import "core:crypto/x25519"

 TEST_count := 0
-TEST_fail  := 0
+TEST_fail := 0

 when ODIN_TEST {
-	expect  :: testing.expect
-	log     :: testing.log
+	expect :: testing.expect
+	log :: testing.log
 } else {
-	expect  :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
+	expect :: proc(t: ^testing.T, condition: bool, message: string, loc := #caller_location) {
 		TEST_count += 1
 		if !condition {
 			TEST_fail += 1
@@ -51,36 +48,18 @@ when ODIN_TEST {

 main :: proc() {
 	t := testing.T{}
-	test_md5(&t)
-	test_sha1(&t)
-	test_sha224(&t)
-	test_sha256(&t)
-	test_sha384(&t)
-	test_sha512(&t)
-	test_sha512_256(&t)
-	test_sha3_224(&t)
-	test_sha3_256(&t)
-	test_sha3_384(&t)
-	test_sha3_512(&t)
-	test_shake_128(&t)
-	test_shake_256(&t)
-	test_keccak_224(&t)
-	test_keccak_256(&t)
-	test_keccak_384(&t)
-	test_keccak_512(&t)
-	test_blake2b(&t)
-	test_blake2s(&t)
-	test_sm3(&t)
-	test_siphash_2_4(&t)
-
-	// "modern" crypto tests
+
+	test_rand_bytes(&t)
+
+	test_hash(&t)
+	test_mac(&t)
+
 	test_chacha20(&t)
-	test_poly1305(&t)
 	test_chacha20poly1305(&t)
+	test_shake(&t)
 	test_x25519(&t)
-	test_rand_bytes(&t)
-	bench_modern(&t)
+	bench_crypto(&t)

 	fmt.printf("%v/%v tests successful.\n", TEST_count - TEST_fail, TEST_count)
 	if TEST_fail > 0 {
@@ -88,411 +67,422 @@ main :: proc() {
 	}
 }

-TestHash :: struct {
-	hash: string,
-	str:  string,
-}
-
-hex_string :: proc(bytes: []byte, allocator := context.temp_allocator) -> string {
-	lut: [16]byte = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'}
-	buf := make([]byte, len(bytes) * 2, allocator)
-	for i := 0; i < len(bytes); i += 1 {
-		buf[i * 2 + 0] = lut[bytes[i] >> 4 & 0xf]
-		buf[i * 2 + 1] = lut[bytes[i]      & 0xf]
-	}
-	return string(buf)
-}
-
-@(test)
-test_md5 :: proc(t: ^testing.T) {
-	// Official test vectors from https://datatracker.ietf.org/doc/html/rfc1321
-	test_vectors := [?]TestHash {
-		TestHash{"d41d8cd98f00b204e9800998ecf8427e", ""},
-		TestHash{"0cc175b9c0f1b6a831c399e269772661", "a"},
-		TestHash{"900150983cd24fb0d6963f7d28e17f72", "abc"},
-		TestHash{"f96b697d7cb7938d525a2f31aaf161d0", "message digest"},
-		TestHash{"c3fcd3d76192e4007dfb496cca67e13b", "abcdefghijklmnopqrstuvwxyz"},
-		TestHash{"d174ab98d277d9f5a5611c2c9f419d9f", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"},
-		TestHash{"57edf4a22be3c955ac49da2e2107b67a", "12345678901234567890123456789012345678901234567890123456789012345678901234567890"},
-	}
-	for v, _ in test_vectors {
-		computed     := md5.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha1 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
-		TestHash{"a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
-		TestHash{"f9537c23893d2014f365adf8ffe33b8eb0297ed1", "abcdbcdecdefdefgefghfghighijhi"},
-		TestHash{"346fb528a24b48f563cb061470bcfd23740427ad", "jkijkljklmklmnlmnomnopnopq"},
-		TestHash{"86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
-		TestHash{"c729c8996ee0a6f74f4f3248e8957edf704fb624", "01234567012345670123456701234567"},
-		TestHash{"84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"a49b2446a02c645bf419f995b67091253a04a259", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha1.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha224 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	// https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
-    data_1_000_000_a := strings.repeat("a", 1_000_000)
-	test_vectors := [?]TestHash {
-		TestHash{"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f", ""},
-		TestHash{"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7", "abc"},
-		TestHash{"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-		TestHash{"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67", data_1_000_000_a},
-	}
-	for v, _ in test_vectors {
-		computed     := sha2.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha256 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", ""},
-		TestHash{"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc"},
-		TestHash{"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha2.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha384 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", ""},
-		TestHash{"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7", "abc"},
-		TestHash{"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha2.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha512 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", ""},
-		TestHash{"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f", "abc"},
-		TestHash{"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha2.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha512_256 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	test_vectors := [?]TestHash {
-		TestHash{"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23", "abc"},
-		TestHash{"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha2.hash_512_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha3_224 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7", ""},
-		TestHash{"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf", "abc"},
-		TestHash{"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e", "abcdbcdecdefdefgefghfghighijhi"},
-		TestHash{"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a", "jkijkljklmklmnlmnomnopnopq"},
-		TestHash{"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b", "a"},
-		TestHash{"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112", "01234567012345670123456701234567"},
-		TestHash{"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha3.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha3_256 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a", ""},
-		TestHash{"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532", "abc"},
-		TestHash{"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a", "abcdbcdecdefdefgefghfghighijhi"},
-		TestHash{"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98", "jkijkljklmklmnlmnomnopnopq"},
-		TestHash{"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b", "a"},
-		TestHash{"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767", "01234567012345670123456701234567"},
-		TestHash{"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha3.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha3_384 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004", ""},
-		TestHash{"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25", "abc"},
-		TestHash{"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a", "abcdbcdecdefdefgefghfghighijhi"},
-		TestHash{"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282", "jkijkljklmklmnlmnomnopnopq"},
-		TestHash{"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9", "a"},
-		TestHash{"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be", "01234567012345670123456701234567"},
-		TestHash{"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha3.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_sha3_512 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26", ""},
-		TestHash{"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0", "abc"},
-		TestHash{"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e", "abcdbcdecdefdefgefghfghighijhi"},
-		TestHash{"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891", "jkijkljklmklmnlmnomnopnopq"},
-		TestHash{"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a", "a"},
-		TestHash{"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1", "01234567012345670123456701234567"},
-		TestHash{"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"},
-		TestHash{"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185", "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"},
-	}
-	for v, _ in test_vectors {
-		computed     := sha3.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_shake_128 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"7f9c2ba4e88f827d616045507605853e", ""},
-		TestHash{"f4202e3c5852f9182a0430fd8144f0a7", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"853f4538be0db9621a6cea659a06c110", "The quick brown fox jumps over the lazy dof"},
-	}
-	for v, _ in test_vectors {
-		computed     := shake.hash_128(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
-
-@(test)
-test_shake_256 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f", ""},
-		TestHash{"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401", "The quick brown fox jumps over the lazy dof"},
-	}
-	for v, _ in test_vectors {
-		computed     := shake.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
-}
+_PLAINTEXT_SUNSCREEN_STR := "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."

 @(test)
-test_keccak_224 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd", ""},
-		TestHash{"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8", "abc"},
-	}
-	for v, _ in test_vectors {
-		computed     := keccak.hash_224(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+test_chacha20 :: proc(t: ^testing.T) {
+	log(t, "Testing (X)ChaCha20")
+
+	// Test cases taken from RFC 8439, and draft-irtf-cfrg-xchacha-03
+	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
+
+	key := [chacha20.KEY_SIZE]byte {
+		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+		0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+		0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+	}
+
+	nonce := [chacha20.NONCE_SIZE]byte {
+		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a,
+		0x00, 0x00, 0x00, 0x00,
+	}
+
+	ciphertext := [114]byte {
+		0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
+		0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
+		0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
+		0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b,
+		0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab,
+		0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57,
+		0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab,
+		0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8,
+		0x07, 0xca, 0x0d, 0xbf, 0x50, 0x0d, 0x6a, 0x61,
+		0x56, 0xa3, 0x8e, 0x08, 0x8a, 0x22, 0xb6, 0x5e,
+		0x52, 0xbc, 0x51, 0x4d, 0x16, 0xcc, 0xf8, 0x06,
+		0x81, 0x8c, 0xe9, 0x1a, 0xb7, 0x79, 0x37, 0x36,
+		0x5a, 0xf9, 0x0b, 0xbf, 0x74, 0xa3, 0x5b, 0xe6,
+		0xb4, 0x0b, 0x8e, 0xed, 0xf2, 0x78, 0x5e, 0x42,
+		0x87, 0x4d,
+	}
+	ciphertext_str := string(hex.encode(ciphertext[:], context.temp_allocator))
+
+	derived_ciphertext: [114]byte
+	ctx: chacha20.Context = ---
+	chacha20.init(&ctx, key[:], nonce[:])
+	chacha20.seek(&ctx, 1) // The test vectors start the counter at 1.
+	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
+
+	derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
+	expect(
+		t,
+		derived_ciphertext_str == ciphertext_str,
+		fmt.tprintf(
+			"Expected %s for xor_bytes(plaintext_str), but got %s instead",
+			ciphertext_str,
+			derived_ciphertext_str,
+		),
+	)
+
+	xkey := [chacha20.KEY_SIZE]byte {
+		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+	}
+
+	xnonce := [chacha20.XNONCE_SIZE]byte {
+		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
+		0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
+		0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
+	}
+
+	xciphertext := [114]byte {
+		0xbd, 0x6d, 0x17, 0x9d, 0x3e, 0x83, 0xd4, 0x3b,
+		0x95, 0x76, 0x57, 0x94, 0x93, 0xc0, 0xe9, 0x39,
+		0x57, 0x2a, 0x17, 0x00, 0x25, 0x2b, 0xfa, 0xcc,
+		0xbe, 0xd2, 0x90, 0x2c, 0x21, 0x39, 0x6c, 0xbb,
+		0x73, 0x1c, 0x7f, 0x1b, 0x0b, 0x4a, 0xa6, 0x44,
+		0x0b, 0xf3, 0xa8, 0x2f, 0x4e, 0xda, 0x7e, 0x39,
+		0xae, 0x64, 0xc6, 0x70, 0x8c, 0x54, 0xc2, 0x16,
+		0xcb, 0x96, 0xb7, 0x2e, 0x12, 0x13, 0xb4, 0x52,
+		0x2f, 0x8c, 0x9b, 0xa4, 0x0d, 0xb5, 0xd9, 0x45,
+		0xb1, 0x1b, 0x69, 0xb9, 0x82, 0xc1, 0xbb, 0x9e,
+		0x3f, 0x3f, 0xac, 0x2b, 0xc3, 0x69, 0x48, 0x8f,
+		0x76, 0xb2, 0x38, 0x35, 0x65, 0xd3, 0xff, 0xf9,
+		0x21, 0xf9, 0x66, 0x4c, 0x97, 0x63, 0x7d, 0xa9,
+		0x76, 0x88, 0x12, 0xf6, 0x15, 0xc6, 0x8b, 0x13,
+		0xb5, 0x2e,
+	}
+	xciphertext_str := string(hex.encode(xciphertext[:], context.temp_allocator))
+
+	chacha20.init(&ctx, xkey[:], xnonce[:])
+	chacha20.seek(&ctx, 1)
+	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
+
+	derived_ciphertext_str = string(hex.encode(derived_ciphertext[:], context.temp_allocator))
+	expect(
+		t,
+		derived_ciphertext_str == xciphertext_str,
+		fmt.tprintf(
+			"Expected %s for xor_bytes(plaintext_str), but got %s instead",
+			xciphertext_str,
+			derived_ciphertext_str,
+		),
+	)
 }

 @(test)
-test_keccak_256 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", ""},
-		TestHash{"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45", "abc"},
-	}
-	for v, _ in test_vectors {
-		computed     := keccak.hash_256(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+test_chacha20poly1305 :: proc(t: ^testing.T) {
+	log(t, "Testing chacha20poly1305")
+
+	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
+
+	aad := [12]byte {
+		0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
+		0xc4, 0xc5, 0xc6, 0xc7,
+	}
+
+	key := [chacha20poly1305.KEY_SIZE]byte {
+		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+	}
+
+	nonce := [chacha20poly1305.NONCE_SIZE]byte {
+		0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43,
+		0x44, 0x45, 0x46, 0x47,
+	}
+
+	ciphertext := [114]byte {
+		0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
+		0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
+		0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
+		0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
+		0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
+		0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
+		0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
+		0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
+		0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
+		0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
+		0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
+		0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
+		0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
+		0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
+		0x61, 0x16,
+	}
+	ciphertext_str := string(hex.encode(ciphertext[:], context.temp_allocator))
+
+	tag := [chacha20poly1305.TAG_SIZE]byte {
+		0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
+		0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91,
+	}
+	tag_str := string(hex.encode(tag[:], context.temp_allocator))
+
+	derived_tag: [chacha20poly1305.TAG_SIZE]byte
+	derived_ciphertext: [114]byte
+
+	chacha20poly1305.encrypt(
+		derived_ciphertext[:],
+		derived_tag[:],
+		key[:],
+		nonce[:],
+		aad[:],
+		plaintext,
+	)
+
+	derived_ciphertext_str := string(hex.encode(derived_ciphertext[:], context.temp_allocator))
+	expect(
+		t,
+		derived_ciphertext_str == ciphertext_str,
+		fmt.tprintf(
+			"Expected ciphertext %s for encrypt(aad, plaintext), but got %s instead",
+			ciphertext_str,
+			derived_ciphertext_str,
+		),
+	)
+
+	derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
+	expect(
+		t,
+		derived_tag_str == tag_str,
+		fmt.tprintf(
+			"Expected tag %s for encrypt(aad, plaintext), but got %s instead",
+			tag_str,
+			derived_tag_str,
+		),
+	)
+
+	derived_plaintext: [114]byte
+	ok := chacha20poly1305.decrypt(
+		derived_plaintext[:],
+		tag[:],
+		key[:],
+		nonce[:],
+		aad[:],
+		ciphertext[:],
+	)
+	derived_plaintext_str := string(derived_plaintext[:])
+	expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
+	expect(
+		t,
+		derived_plaintext_str == _PLAINTEXT_SUNSCREEN_STR,
+		fmt.tprintf(
+			"Expected plaintext %s for decrypt(tag, aad, ciphertext), but got %s instead",
+			_PLAINTEXT_SUNSCREEN_STR,
+			derived_plaintext_str,
+		),
+	)
+
+	derived_ciphertext[0] ~= 0xa5
+	ok = chacha20poly1305.decrypt(
+		derived_plaintext[:],
+		tag[:],
+		key[:],
+		nonce[:],
+		aad[:],
+		derived_ciphertext[:],
+	)
+	expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")
+
+	aad[0] ~= 0xa5
+	ok = chacha20poly1305.decrypt(
+		derived_plaintext[:],
+		tag[:],
+		key[:],
+		nonce[:],
+		aad[:],
+		ciphertext[:],
+	)
+	expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
 }

-@(test)
-test_keccak_384 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff", ""},
-		TestHash{"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e", "abc"},
-	}
-	for v, _ in test_vectors {
-		computed     := keccak.hash_384(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+TestECDH :: struct {
+	scalar:  string,
+	point:   string,
+	product: string,
 }

 @(test)
-test_keccak_512 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
-	// https://www.di-mgt.com.au/sha_testvectors.html
-	test_vectors := [?]TestHash {
-		TestHash{"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e", ""},
-		TestHash{"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96", "abc"},
+test_x25519 :: proc(t: ^testing.T) {
+	log(t, "Testing X25519")
+
+	// Local copy of this so that the base point doesn't need to be exported.
+	_BASE_POINT: [32]byte =  {
+		9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	}
+
+	test_vectors := [?]TestECDH {
+		// Test vectors from RFC 7748
+		{
+			"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
+			"e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
+			"c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
+		},
+		{
+			"4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d",
+			"e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493",
+			"95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957",
+		},
 	}
 	for v, _ in test_vectors {
-		computed     := keccak.hash_512(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+		scalar, _ := hex.decode(transmute([]byte)(v.scalar), context.temp_allocator)
+		point, _ := hex.decode(transmute([]byte)(v.point), context.temp_allocator)
+
+		derived_point: [x25519.POINT_SIZE]byte
+		x25519.scalarmult(derived_point[:], scalar[:], point[:])
+		derived_point_str := string(hex.encode(derived_point[:], context.temp_allocator))
+
+		expect(
+			t,
+			derived_point_str == v.product,
+			fmt.tprintf(
+				"Expected %s for %s * %s, but got %s instead",
+				v.product,
+				v.scalar,
+				v.point,
+				derived_point_str,
+			),
+		)
+
+		// Abuse the test vectors to sanity-check the scalar-basepoint multiply.
+		p1, p2: [x25519.POINT_SIZE]byte
+		x25519.scalarmult_basepoint(p1[:], scalar[:])
+		x25519.scalarmult(p2[:], scalar[:], _BASE_POINT[:])
+		p1_str := string(hex.encode(p1[:], context.temp_allocator))
+		p2_str := string(hex.encode(p2[:], context.temp_allocator))
+		expect(
+			t,
+			p1_str == p2_str,
+			fmt.tprintf(
+				"Expected %s for %s * basepoint, but got %s instead",
+				p2_str,
+				v.scalar,
+				p1_str,
+			),
+		)
+	}
+
+	// TODO/tests: Run the wycheproof test vectors, once I figure out
+	// how to work with JSON.
 }

 @(test)
-test_blake2b :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce", ""},
-		TestHash{"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918", "The quick brown fox jumps over the lazy dog"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake2b.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
+test_rand_bytes :: proc(t: ^testing.T) {
+	log(t, "Testing rand_bytes")
+
+	if ODIN_OS != .Linux {
+		log(t, "rand_bytes not supported - skipping")
+		return
+	}
+
+	allocator := context.allocator
+
+	buf := make([]byte, 1 << 25, allocator)
+	defer delete(buf)
+
+	// Testing a CSPRNG for correctness is incredibly involved and
+	// beyond the scope of an implementation that offloads
+	// responsibility for correctness to the OS.
+	//
+	// Just attempt to randomize a sufficiently large buffer, where
+	// sufficiently large is:
+	//  * Larger than the maximum getentropy request size (256 bytes).
+	//  * Larger than the maximum getrandom request size (2^25 - 1 bytes).
+	//
+	// While theoretically non-deterministic, if this fails, chances
+	// are the CSPRNG is busted.
+	seems_ok := false
+	for i := 0; i < 256; i = i + 1 {
+		mem.zero_explicit(raw_data(buf), len(buf))
+		crypto.rand_bytes(buf)
+
+		if buf[0] != 0 && buf[len(buf) - 1] != 0 {
+			seems_ok = true
+			break
+		}
 	}
-}

-@(test)
-test_blake2s :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9", ""},
-		TestHash{"606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812", "The quick brown fox jumps over the lazy dog"},
-	}
-	for v, _ in test_vectors {
-		computed     := blake2s.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+	expect(
+		t,
+		seems_ok,
+		"Expected to randomize the head and tail of the buffer within a handful of attempts",
+	)
 }

-@(test)
-test_sm3 :: proc(t: ^testing.T) {
-	test_vectors := [?]TestHash {
-		TestHash{"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b", ""},
-		TestHash{"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0", "abc"},
-		TestHash{"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732", "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"},
-		TestHash{"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea", "The quick brown fox jumps over the lazy dog"},
-		TestHash{"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098", "The quick brown fox jumps over the lazy cog"},
-	}
-	for v, _ in test_vectors {
-		computed     := sm3.hash(v.str)
-		computed_str := hex_string(computed[:])
-		expect(t, computed_str == v.hash, fmt.tprintf("Expected: %s for input of %s, but got %s instead", v.hash, v.str, computed_str))
-	}
+TestXOF :: struct {
+	sec_strength: int,
+	output:       string,
+	str:          string,
 }

 @(test)
-test_siphash_2_4 :: proc(t: ^testing.T) {
-	// Test vectors from
-	// https://github.com/veorq/SipHash/blob/master/vectors.h
-	test_vectors := [?]u64 {
-		0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
-		0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
-		0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
-		0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
-		0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
-		0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
-		0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
-		0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
-		0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
-		0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
-		0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
-		0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
-		0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
-		0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
-		0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
-		0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
-	}
-
-	key: [16]byte
-	for i in 0..<16 {
-		key[i] = byte(i)
-	}
-
-	for i in 0..<len(test_vectors) {
-		data := make([]byte, i)
-		for j in 0..<i {
-			data[j] = byte(j)
+test_shake :: proc(t: ^testing.T) {
+	test_vectors := [?]TestXOF {
+		// SHAKE128
+		{
+			128,
+			"7f9c2ba4e88f827d616045507605853e",
+			"",
+		},
+		{
+			128,
+			"f4202e3c5852f9182a0430fd8144f0a7",
+			"The quick brown fox jumps over the lazy dog",
+		},
+		{
+			128,
+			"853f4538be0db9621a6cea659a06c110",
+			"The quick brown fox jumps over the lazy dof",
+		},
+
+		// SHAKE256
+		{
+			256,
+			"46b9dd2b0ba88d13233b3feb743eeb243fcd52ea62b81b82b50c27646ed5762f",
+			"",
+		},
+		{
+			256,
+			"2f671343d9b2e1604dc9dcf0753e5fe15c7c64a0d283cbbf722d411a0e36f6ca",
+			"The quick brown fox jumps over the lazy dog",
+		},
+		{
+			256,
+			"46b1ebb2e142c38b9ac9081bef72877fe4723959640fa57119b366ce6899d401",
+			"The quick brown fox jumps over the lazy dof",
+		},
+	}
+	for v in test_vectors {
+		dst := make([]byte, len(v.output)/2, context.temp_allocator)
+
+		data := transmute([]byte)(v.str)
+
+		ctx: shake.Context
+		switch v.sec_strength {
+		case 128:
+			shake.init_128(&ctx)
+		case 256:
+			shake.init_256(&ctx)
 		}

-		vector   := test_vectors[i]
-		computed := siphash.sum_2_4(data[:], key[:])
-
-		expect(t, computed == vector, fmt.tprintf("Expected: 0x%x for input of %v, but got 0x%x instead", vector, data, computed))
+		shake.write(&ctx, data)
+		shake.read(&ctx, dst)
+
+		dst_str := string(hex.encode(dst, context.temp_allocator))
+
+		expect(
+			t,
+			dst_str == v.output,
+			fmt.tprintf(
+				"SHAKE%d: Expected: %s for input of %s, but got %s instead",
+				v.sec_strength,
+				v.output,
+				v.str,
+				dst_str,
+			),
+		)
 	}
 }
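Note that test_shake drives both security strengths through a single XOF interface: unlike the removed fixed-size hash_128/hash_256 helpers, the caller now chooses the output length at read time. A minimal sketch, assuming the core:crypto/shake API exercised above:

package sketch

import "core:crypto/shake"

main :: proc() {
	input := "arbitrary input"

	ctx: shake.Context
	shake.init_256(&ctx) // or init_128 for the 128-bit security strength
	shake.write(&ctx, transmute([]byte)(input))

	out: [64]byte // any length works; an XOF's output is caller-sized
	shake.read(&ctx, out[:])
}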

+ 616 - 0
tests/core/crypto/test_core_crypto_hash.odin

@@ -0,0 +1,616 @@
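This new file consolidates the per-algorithm digest tests behind the unified core:crypto/hash interface, exercising both the incremental Context flow (init/update/final) and the one-shot helpers. As a hedged sketch of the one-shot form that the hash_stream/hash_string calls below rely on:

package sketch

import "core:crypto/hash"
import "core:encoding/hex"
import "core:fmt"

main :: proc() {
	// The algorithm is a runtime value, so one code path covers every digest.
	digest := hash.hash_string(hash.Algorithm.SHA256, "abc", context.temp_allocator)
	fmt.println(string(hex.encode(digest, context.temp_allocator)))
	// Expected (per the SHA-256 vectors below):
	// ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
}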
+package test_core_crypto
+
+import "core:bytes"
+import "core:encoding/hex"
+import "core:fmt"
+import "core:strings"
+import "core:testing"
+
+import "core:crypto/hash"
+
+TestHash :: struct {
+	algo: hash.Algorithm,
+	hash: string,
+	str:  string,
+}
+
+@(test)
+test_hash :: proc(t: ^testing.T) {
+	log(t, "Testing Hashes")
+
+	// TODO:
+	// - Stick the test vectors in a JSON file or something.
+	data_1_000_000_a := strings.repeat("a", 1_000_000)
+
+	digest: [64]byte // 512-bits is enough for every digest for now.
+	test_vectors := [?]TestHash {
+		// BLAKE2b
+		{
+			hash.Algorithm.BLAKE2B,
+			"786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce",
+			"",
+		},
+		{
+			hash.Algorithm.BLAKE2B,
+			"a8add4bdddfd93e4877d2746e62817b116364a1fa7bc148d95090bc7333b3673f82401cf7aa2e4cb1ecd90296e3f14cb5413f8ed77be73045b13914cdcd6a918",
+			"The quick brown fox jumps over the lazy dog",
+		},
+
+		// BLAKE2s
+		{
+			hash.Algorithm.BLAKE2S,
+			"69217a3079908094e11121d042354a7c1f55b6482ca1a51e1b250dfd1ed0eef9",
+			"",
+		},
+		{
+			hash.Algorithm.BLAKE2S,
+			"606beeec743ccbeff6cbcdf5d5302aa855c256c29b88c8ed331ea1a6bf3c8812",
+			"The quick brown fox jumps over the lazy dog",
+		},
+
+		// SHA-224
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		// - https://datatracker.ietf.org/doc/html/rfc3874#section-3.3
+		{
+			hash.Algorithm.SHA224,
+			"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f",
+			"",
+		},
+		{
+			hash.Algorithm.SHA224,
+			"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA224,
+			"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA224,
+			"c97ca9a559850ce97a04a96def6d99a9e0e0e2ab14e6b8df265fc0b3",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+		{
+			hash.Algorithm.SHA224,
+			"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67",
+			data_1_000_000_a,
+		},
+
+		// SHA-256
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA256,
+			"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+			"",
+		},
+		{
+			hash.Algorithm.SHA256,
+			"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA256,
+			"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA256,
+			"cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA-384
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA384,
+			"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b",
+			"",
+		},
+		{
+			hash.Algorithm.SHA384,
+			"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA384,
+			"3391fdddfc8dc7393707a65b1b4709397cf8b1d162af05abfe8f450de5f36bc6b0455a8520bc4e6f5fe95b1fe3c8452b",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA384,
+			"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA-512
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA512,
+			"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
+			"",
+		},
+		{
+			hash.Algorithm.SHA512,
+			"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA512,
+			"204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA512,
+			"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+		// SHA-512/256
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		{
+			hash.Algorithm.SHA512_256,
+			"53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA512_256,
+			"3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA3-224
+		//
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA3_224,
+			"6b4e03423667dbb73b6e15454f0eb1abd4597f9a1b078e3f5b5a6bc7",
+			"",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"e642824c3f8cf24ad09234ee7d3c766fc9a3a5168d0c94ad73b46fdf",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"10241ac5187380bd501192e4e56b5280908727dd8fe0d10d4e5ad91e",
+			"abcdbcdecdefdefgefghfghighijhi",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"fd645fe07d814c397e85e85f92fe58b949f55efa4d3468b2468da45a",
+			"jkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"9e86ff69557ca95f405f081269685b38e3a819b309ee942f482b6a8b",
+			"a",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"6961f694b2ff3ed6f0c830d2c66da0c5e7ca9445f7c0dca679171112",
+			"01234567012345670123456701234567",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"8a24108b154ada21c9fd5574494479ba5c7e7ab76ef264ead0fcce33",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_224,
+			"543e6868e1666c1a643630df77367ae5a62a85070a51c14cbf665cbc",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA3-256
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA3_256,
+			"a7ffc6f8bf1ed76651c14756a061d662f580ff4de43b49fa82d80a4b80f8434a",
+			"",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"3a985da74fe225b2045c172d6bd390bd855f086e3e9d525b46bfe24511431532",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"565ada1ced21278cfaffdde00dea0107964121ac25e4e978abc59412be74550a",
+			"abcdbcdecdefdefgefghfghighijhi",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"8cc1709d520f495ce972ece48b0d2e1f74ec80d53bc5c47457142158fae15d98",
+			"jkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"80084bf2fba02475726feb2cab2d8215eab14bc6bdd8bfb2c8151257032ecd8b",
+			"a",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"e4786de5f88f7d374b7288f225ea9f2f7654da200bab5d417e1fb52d49202767",
+			"01234567012345670123456701234567",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"41c0dba2a9d6240849100376a8235e2c82e1b9998a999e21db32dd97496d3376",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_256,
+			"916f6061fe879741ca6469b43971dfdb28b1a32dc36cb3254e812be27aad1d18",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA3-384
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA3_384,
+			"0c63a75b845e4f7d01107d852e4c2485c51a50aaaa94fc61995e71bbee983a2ac3713831264adb47fb6bd1e058d5f004",
+			"",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"ec01498288516fc926459f58e2c6ad8df9b473cb0fc08c2596da7cf0e49be4b298d88cea927ac7f539f1edf228376d25",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"9aa92dbb716ebb573def0d5e3cdd28d6add38ada310b602b8916e690a3257b7144e5ddd3d0dbbc559c48480d34d57a9a",
+			"abcdbcdecdefdefgefghfghighijhi",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"77c90323d7392bcdee8a3e7f74f19f47b7d1b1a825ac6a2d8d882a72317879cc26597035f1fc24fe65090b125a691282",
+			"jkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"1815f774f320491b48569efec794d249eeb59aae46d22bf77dafe25c5edc28d7ea44f93ee1234aa88f61c91912a4ccd9",
+			"a",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"51072590ad4c51b27ff8265590d74f92de7cc55284168e414ca960087c693285b08a283c6b19d77632994cb9eb93f1be",
+			"01234567012345670123456701234567",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"991c665755eb3a4b6bbdfb75c78a492e8c56a22c5c4d7e429bfdbc32b9d4ad5aa04a1f076e62fea19eef51acd0657c22",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_384,
+			"79407d3b5916b59c3e30b09822974791c313fb9ecc849e406f23592d04f625dc8c709b98b43b3852b337216179aa7fc7",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SHA3-512
+		// https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.SHA3_512,
+			"a69f73cca23a9ac5c8b567dc185a756e97c982164fe25859e0d1dcc1475c80a615b2123af1f5f94c11e3e9402c3ac558f500199d95b6d3e301758586281dcd26",
+			"",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"b751850b1a57168a5693cd924b6b096e08f621827444f70d884f5d0240d2712e10e116e9192af3c91a7ec57647e3934057340b4cf408d5a56592f8274eec53f0",
+			"abc",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"9f9a327944a35988d67effc4fa748b3c07744f736ac70b479d8e12a3d10d6884d00a7ef593690305462e9e9030a67c51636fd346fd8fa0ee28a5ac2aee103d2e",
+			"abcdbcdecdefdefgefghfghighijhi",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"dbb124a0deda966eb4d199d0844fa0beb0770ea1ccddabcd335a7939a931ac6fb4fa6aebc6573f462ced2e4e7178277803be0d24d8bc2864626d9603109b7891",
+			"jkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"697f2d856172cb8309d6b8b97dac4de344b549d4dee61edfb4962d8698b7fa803f4f93ff24393586e28b5b957ac3d1d369420ce53332712f997bd336d09ab02a",
+			"a",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"5679e353bc8eeea3e801ca60448b249bcfd3ac4a6c3abe429a807bcbd4c9cd12da87a5a9dc74fde64c0d44718632cae966b078397c6f9ec155c6a238f2347cf1",
+			"01234567012345670123456701234567",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"04a371e84ecfb5b8b77cb48610fca8182dd457ce6f326a0fd3d7ec2f1e91636dee691fbe0c985302ba1b0d8dc78c086346b533b49c030d99a27daf1139d6e75e",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.SHA3_512,
+			"afebb2ef542e6579c50cad06d2e578f9f8dd6881d7dc824d26360feebf18a4fa73e3261122948efcfd492e74e82e2189ed0fb440d187f382270cb455f21dd185",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+
+		// SM3
+		{
+			hash.Algorithm.SM3,
+			"1ab21d8355cfa17f8e61194831e81a8f22bec8c728fefb747ed035eb5082aa2b",
+			"",
+		},
+		{
+			hash.Algorithm.SM3,
+			"66c7f0f462eeedd9d1f2d46bdc10e4e24167c4875cf2f7a2297da02b8f4ba8e0",
+			"abc",
+		},
+		{
+			hash.Algorithm.SM3,
+			"debe9ff92275b8a138604889c18e5a4d6fdb70e5387e5765293dcba39c0c5732",
+			"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd",
+		},
+		{
+			hash.Algorithm.SM3,
+			"5fdfe814b8573ca021983970fc79b2218c9570369b4859684e2e4c3fc76cb8ea",
+			"The quick brown fox jumps over the lazy dog",
+		},
+		{
+			hash.Algorithm.SM3,
+			"ca27d14a42fc04c1e5ecf574a95a8c2d70ecb5805e9b429026ccac8f28b20098",
+			"The quick brown fox jumps over the lazy cog",
+		},
+
+		// Keccak-224 (Legacy)
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.Legacy_KECCAK_224,
+			"f71837502ba8e10837bdd8d365adb85591895602fc552b48b7390abd",
+			"",
+		},
+		{
+			hash.Algorithm.Legacy_KECCAK_224,
+			"c30411768506ebe1c2871b1ee2e87d38df342317300a9b97a95ec6a8",
+			"abc",
+		},
+
+		// Keccak-256 (Legacy)
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.Legacy_KECCAK_256,
+			"c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
+			"",
+		},
+		{
+			hash.Algorithm.Legacy_KECCAK_256,
+			"4e03657aea45a94fc7d47ba826c8d667c0d1e6e33a64a036ec44f58fa12d6c45",
+			"abc",
+		},
+
+		// Keccak-384 (Legacy)
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.Legacy_KECCAK_384,
+			"2c23146a63a29acf99e73b88f8c24eaa7dc60aa771780ccc006afbfa8fe2479b2dd2b21362337441ac12b515911957ff",
+			"",
+		},
+		{
+			hash.Algorithm.Legacy_KECCAK_384,
+			"f7df1165f033337be098e7d288ad6a2f74409d7a60b49c36642218de161b1f99f8c681e4afaf31a34db29fb763e3c28e",
+			"abc",
+		},
+
+		// Keccak-512 (Legacy)
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		{
+			hash.Algorithm.Legacy_KECCAK_512,
+			"0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e",
+			"",
+		},
+		{
+			hash.Algorithm.Legacy_KECCAK_512,
+			"18587dc2ea106b9a1563e32b3312421ca164c7f1f07bc922a9c83d77cea3a1e5d0c69910739025372dc14ac9642629379540c17e2a65b19d77aa511a9d00bb96",
+			"abc",
+		},
+
+		// MD5 (Insecure)
+		// - https://datatracker.ietf.org/doc/html/rfc1321
+		TestHash{hash.Algorithm.Insecure_MD5, "d41d8cd98f00b204e9800998ecf8427e", ""},
+		TestHash{hash.Algorithm.Insecure_MD5, "0cc175b9c0f1b6a831c399e269772661", "a"},
+		TestHash{hash.Algorithm.Insecure_MD5, "900150983cd24fb0d6963f7d28e17f72", "abc"},
+		{
+			hash.Algorithm.Insecure_MD5,
+			"f96b697d7cb7938d525a2f31aaf161d0",
+			"message digest",
+		},
+		{
+			hash.Algorithm.Insecure_MD5,
+			"c3fcd3d76192e4007dfb496cca67e13b",
+			"abcdefghijklmnopqrstuvwxyz",
+		},
+		{
+			hash.Algorithm.Insecure_MD5,
+			"d174ab98d277d9f5a5611c2c9f419d9f",
+			"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
+		},
+		{
+			hash.Algorithm.Insecure_MD5,
+			"57edf4a22be3c955ac49da2e2107b67a",
+			"12345678901234567890123456789012345678901234567890123456789012345678901234567890",
+		},
+
+		// SHA-1 (Insecure)
+		// - https://csrc.nist.gov/csrc/media/projects/cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf
+		// - https://www.di-mgt.com.au/sha_testvectors.html
+		TestHash{hash.Algorithm.Insecure_SHA1, "da39a3ee5e6b4b0d3255bfef95601890afd80709", ""},
+		TestHash{hash.Algorithm.Insecure_SHA1, "a9993e364706816aba3e25717850c26c9cd0d89d", "abc"},
+		{
+			hash.Algorithm.Insecure_SHA1,
+			"f9537c23893d2014f365adf8ffe33b8eb0297ed1",
+			"abcdbcdecdefdefgefghfghighijhi",
+		},
+		{
+			hash.Algorithm.Insecure_SHA1,
+			"346fb528a24b48f563cb061470bcfd23740427ad",
+			"jkijkljklmklmnlmnomnopnopq",
+		},
+		TestHash{hash.Algorithm.Insecure_SHA1, "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", "a"},
+		{
+			hash.Algorithm.Insecure_SHA1,
+			"c729c8996ee0a6f74f4f3248e8957edf704fb624",
+			"01234567012345670123456701234567",
+		},
+		{
+			hash.Algorithm.Insecure_SHA1,
+			"84983e441c3bd26ebaae4aa1f95129e5e54670f1",
+			"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+		},
+		{
+			hash.Algorithm.Insecure_SHA1,
+			"a49b2446a02c645bf419f995b67091253a04a259",
+			"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
+		},
+	}
+	for v, _ in test_vectors {
+		algo_name := hash.ALGORITHM_NAMES[v.algo]
+		dst := digest[:hash.DIGEST_SIZES[v.algo]]
+
+		data := transmute([]byte)(v.str)
+
+		ctx: hash.Context
+		hash.init(&ctx, v.algo)
+		hash.update(&ctx, data)
+		hash.final(&ctx, dst)
+
+		dst_str := string(hex.encode(dst, context.temp_allocator))
+
+		expect(
+			t,
+			dst_str == v.hash,
+			fmt.tprintf(
+				"%s/incremental: Expected: %s for input of %s, but got %s instead",
+				algo_name,
+				v.hash,
+				v.str,
+				dst_str,
+			),
+		)
+	}
+
+	for algo in hash.Algorithm {
+		// Skip the sentinel value.
+		if algo == .Invalid {
+			continue
+		}
+
+		algo_name := hash.ALGORITHM_NAMES[algo]
+
+		// Ensure that the MAX_(DIGEST_SIZE, BLOCK_SIZE) constants are
+		// still correct.
+		digest_sz := hash.DIGEST_SIZES[algo]
+		block_sz := hash.BLOCK_SIZES[algo]
+		expect(
+			t,
+			digest_sz <= hash.MAX_DIGEST_SIZE,
+			fmt.tprintf(
+				"%s: Digest size %d exceeds max %d",
+				algo_name,
+				digest_sz,
+				hash.MAX_DIGEST_SIZE,
+			),
+		)
+		expect(
+			t,
+			block_sz <= hash.MAX_BLOCK_SIZE,
+			fmt.tprintf(
+				"%s: Block size %d exceeds max %d",
+				algo_name,
+				block_sz,
+				hash.MAX_BLOCK_SIZE,
+			),
+		)
+
+		// Exercise most of the happy path for the high-level interface.
+		rd: bytes.Reader
+		bytes.reader_init(&rd, transmute([]byte)(data_1_000_000_a))
+		st := bytes.reader_to_stream(&rd)
+
+		digest_a, _ := hash.hash_stream(algo, st, context.temp_allocator)
+		digest_b := hash.hash_string(algo, data_1_000_000_a, context.temp_allocator)
+
+		a_str := string(hex.encode(digest_a, context.temp_allocator))
+		b_str := string(hex.encode(digest_b, context.temp_allocator))
+
+		expect(
+			t,
+			a_str == b_str,
+			fmt.tprintf(
+				"%s/cmp: Expected: %s (hash_stream) == %s (hash_bytes)",
+				algo_name,
+				a_str,
+				b_str,
+			),
+		)
+
+		// Exercise the rolling digest functionality, which also covers
+		// each implementation's clone routine.
+		ctx, ctx_clone: hash.Context
+		hash.init(&ctx, algo)
+
+		api_algo := hash.algorithm(&ctx)
+		api_digest_size := hash.digest_size(&ctx)
+		expect(
+			t,
+			algo == api_algo,
+			fmt.tprintf(
+				"%s/algorithm: Expected: %v but got %v instead",
+				algo_name,
+				algo,
+				api_algo,
+			),
+		)
+		expect(
+			t,
+			hash.DIGEST_SIZES[algo] == api_digest_size,
+			fmt.tprintf(
+				"%s/digest_size: Expected: %d but got %d instead",
+				algo_name,
+				hash.DIGEST_SIZES[algo],
+				api_digest_size,
+			),
+		)
+
+		hash.update(&ctx, digest_a)
+		hash.clone(&ctx_clone, &ctx)
+		hash.final(&ctx, digest_a, true)
+		hash.final(&ctx, digest_b)
+
+		digest_c := make([]byte, hash.digest_size(&ctx_clone), context.temp_allocator)
+		hash.final(&ctx_clone, digest_c)
+
+		a_str = string(hex.encode(digest_a, context.temp_allocator))
+		b_str = string(hex.encode(digest_b, context.temp_allocator))
+		c_str := string(hex.encode(digest_c, context.temp_allocator))
+
+		expect(
+			t,
+			a_str == b_str && b_str == c_str,
+			fmt.tprintf(
+				"%s/rolling: Expected: %s (first) == %s (second) == %s (third)",
+				algo_name,
+				a_str,
+				b_str,
+				c_str,
+			),
+		)
+	}
+}
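
The new core:crypto/hash package drives every digest above through a single
Algorithm enum and a common Context. A minimal usage sketch, assuming only the
names exercised by the test (hash_string, init/update/final, DIGEST_SIZES,
MAX_DIGEST_SIZE); the message and the split point are arbitrary:

package crypto_hash_example

import "core:crypto/hash"
import "core:encoding/hex"
import "core:fmt"

main :: proc() {
	msg := "The quick brown fox jumps over the lazy dog"

	// One-shot: allocates and returns the digest.
	digest := hash.hash_string(hash.Algorithm.SHA256, msg)
	defer delete(digest)
	fmt.println(string(hex.encode(digest, context.temp_allocator)))

	// Incremental: feed the message in pieces, then finalize into a
	// caller-provided buffer sized via DIGEST_SIZES.
	data := transmute([]byte)(msg)

	ctx: hash.Context
	hash.init(&ctx, hash.Algorithm.SHA256)
	hash.update(&ctx, data[:20])
	hash.update(&ctx, data[20:])

	buf: [hash.MAX_DIGEST_SIZE]byte
	dst := buf[:hash.DIGEST_SIZES[hash.Algorithm.SHA256]]
	hash.final(&ctx, dst)

	// Both paths should print the same hex string.
	fmt.println(string(hex.encode(dst, context.temp_allocator)))
}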

+ 241 - 0
tests/core/crypto/test_core_crypto_mac.odin

@@ -0,0 +1,241 @@
+package test_core_crypto
+
+import "core:encoding/hex"
+import "core:fmt"
+import "core:mem"
+import "core:testing"
+
+import "core:crypto/hash"
+import "core:crypto/hmac"
+import "core:crypto/poly1305"
+import "core:crypto/siphash"
+
+@(test)
+test_mac :: proc(t: ^testing.T) {
+	log(t, "Testing MACs")
+
+	test_hmac(t)
+	test_poly1305(t)
+	test_siphash_2_4(t)
+}
+
+@(test)
+test_hmac :: proc(t: ^testing.T) {
+	// Test cases pulled from RFC 6234. HMAC is a generic construct,
+	// so as long as the underlying hash is correct and all of the
+	// code paths are covered, the implementation is "fine"; thus
+	// this only exercises SHA-256.
+
+	test_keys := [?]string {
+		"\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b",
+		"Jefe",
+		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
+		"\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19",
+		"\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x0c",
+		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
+		"\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa",
+	}
+
+	test_msgs := [?]string {
+		"Hi There",
+		"what do ya want for nothing?",
+		"\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd\xdd",
+		"\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd\xcd",
+		"Test With Truncation",
+		"Test Using Larger Than Block-Size Key - Hash Key First",
+		"This is a test using a larger than block-size key and a larger than block-size data. The key needs to be hashed before being used by the HMAC algorithm.",
+	}
+
+	tags_sha256 := [?]string {
+		"b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7",
+		"5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843",
+		"773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe",
+		"82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b",
+		"a3b6167473100ee06e0c796c2955552b",
+		"60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54",
+		"9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2",
+	}
+
+	algo := hash.Algorithm.SHA256
+
+	tag: [64]byte // 512 bits is enough for every digest for now.
+	for k, i in test_keys {
+		algo_name := hash.ALGORITHM_NAMES[algo]
+		dst := tag[:hash.DIGEST_SIZES[algo]]
+
+		key := transmute([]byte)(k)
+		msg := transmute([]byte)(test_msgs[i])
+
+		ctx: hmac.Context
+		hmac.init(&ctx, algo, key)
+		hmac.update(&ctx, msg)
+		hmac.final(&ctx, dst)
+
+		// For simplicity, crypto/hmac does not support truncation, so
+		// test it here by truncating the computed tag to the length of
+		// the expected value.
+		expected_str := tags_sha256[i]
+		tag_len := len(expected_str) / 2
+
+		key_str := string(hex.encode(key, context.temp_allocator))
+		msg_str := string(hex.encode(msg, context.temp_allocator))
+		dst_str := string(hex.encode(dst[:tag_len], context.temp_allocator))
+
+		expect(
+			t,
+			dst_str == expected_str,
+			fmt.tprintf(
+				"%s/incremental: Expected: %s for input of %s - %s, but got %s instead",
+				algo_name,
+				tags_sha256[i],
+				key_str,
+				msg_str,
+				dst_str,
+			),
+		)
+
+		hmac.sum(algo, dst, msg, key)
+		oneshot_str := string(hex.encode(dst[:tag_len], context.temp_allocator))
+
+		expect(
+			t,
+			oneshot_str == expected_str,
+			fmt.tprintf(
+				"%s/oneshot: Expected: %s for input of %s - %s, but got %s instead",
+				algo_name,
+				tags_sha256[i],
+				key_str,
+				msg_str,
+				oneshot_str,
+			),
+		)
+	}
+}
+
+@(test)
+test_poly1305 :: proc(t: ^testing.T) {
+	log(t, "Testing poly1305")
+
+	// Test cases taken from poly1305-donna.
+	key := [poly1305.KEY_SIZE]byte {
+		0xee, 0xa6, 0xa7, 0x25, 0x1c, 0x1e, 0x72, 0x91,
+		0x6d, 0x11, 0xc2, 0xcb, 0x21, 0x4d, 0x3c, 0x25,
+		0x25, 0x39, 0x12, 0x1d, 0x8e, 0x23, 0x4e, 0x65,
+		0x2d, 0x65, 0x1f, 0xa4, 0xc8, 0xcf, 0xf8, 0x80,
+	}
+
+	msg := [131]byte {
+		0x8e, 0x99, 0x3b, 0x9f, 0x48, 0x68, 0x12, 0x73,
+		0xc2, 0x96, 0x50, 0xba, 0x32, 0xfc, 0x76, 0xce,
+		0x48, 0x33, 0x2e, 0xa7, 0x16, 0x4d, 0x96, 0xa4,
+		0x47, 0x6f, 0xb8, 0xc5, 0x31, 0xa1, 0x18, 0x6a,
+		0xc0, 0xdf, 0xc1, 0x7c, 0x98, 0xdc, 0xe8, 0x7b,
+		0x4d, 0xa7, 0xf0, 0x11, 0xec, 0x48, 0xc9, 0x72,
+		0x71, 0xd2, 0xc2, 0x0f, 0x9b, 0x92, 0x8f, 0xe2,
+		0x27, 0x0d, 0x6f, 0xb8, 0x63, 0xd5, 0x17, 0x38,
+		0xb4, 0x8e, 0xee, 0xe3, 0x14, 0xa7, 0xcc, 0x8a,
+		0xb9, 0x32, 0x16, 0x45, 0x48, 0xe5, 0x26, 0xae,
+		0x90, 0x22, 0x43, 0x68, 0x51, 0x7a, 0xcf, 0xea,
+		0xbd, 0x6b, 0xb3, 0x73, 0x2b, 0xc0, 0xe9, 0xda,
+		0x99, 0x83, 0x2b, 0x61, 0xca, 0x01, 0xb6, 0xde,
+		0x56, 0x24, 0x4a, 0x9e, 0x88, 0xd5, 0xf9, 0xb3,
+		0x79, 0x73, 0xf6, 0x22, 0xa4, 0x3d, 0x14, 0xa6,
+		0x59, 0x9b, 0x1f, 0x65, 0x4c, 0xb4, 0x5a, 0x74,
+		0xe3, 0x55, 0xa5,
+	}
+
+	tag := [poly1305.TAG_SIZE]byte {
+		0xf3, 0xff, 0xc7, 0x70, 0x3f, 0x94, 0x00, 0xe5,
+		0x2a, 0x7d, 0xfb, 0x4b, 0x3d, 0x33, 0x05, 0xd9,
+	}
+	tag_str := string(hex.encode(tag[:], context.temp_allocator))
+
+	// Verify - oneshot + compare
+	ok := poly1305.verify(tag[:], msg[:], key[:])
+	expect(t, ok, "oneshot verify call failed")
+
+	// Sum - oneshot
+	derived_tag: [poly1305.TAG_SIZE]byte
+	poly1305.sum(derived_tag[:], msg[:], key[:])
+	derived_tag_str := string(hex.encode(derived_tag[:], context.temp_allocator))
+	expect(
+		t,
+		derived_tag_str == tag_str,
+		fmt.tprintf("Expected %s for sum(msg, key), but got %s instead", tag_str, derived_tag_str),
+	)
+
+	// Incremental
+	mem.zero(&derived_tag, size_of(derived_tag))
+	ctx: poly1305.Context = ---
+	poly1305.init(&ctx, key[:])
+	read_lengths := [11]int{32, 64, 16, 8, 4, 2, 1, 1, 1, 1, 1}
+	off := 0
+	for read_length in read_lengths {
+		to_read := msg[off:off + read_length]
+		poly1305.update(&ctx, to_read)
+		off = off + read_length
+	}
+	poly1305.final(&ctx, derived_tag[:])
+	derived_tag_str = string(hex.encode(derived_tag[:], context.temp_allocator))
+	expect(
+		t,
+		derived_tag_str == tag_str,
+		fmt.tprintf(
+			"Expected %s for init/update/final - incremental, but got %s instead",
+			tag_str,
+			derived_tag_str,
+		),
+	)
+}
+
+@(test)
+test_siphash_2_4 :: proc(t: ^testing.T) {
+	log(t, "Testing SipHash-2-4")
+
+	// Test vectors from
+	// https://github.com/veorq/SipHash/blob/master/vectors.h
+	test_vectors := [?]u64 {
+		0x726fdb47dd0e0e31, 0x74f839c593dc67fd, 0x0d6c8009d9a94f5a, 0x85676696d7fb7e2d,
+		0xcf2794e0277187b7, 0x18765564cd99a68d, 0xcbc9466e58fee3ce, 0xab0200f58b01d137,
+		0x93f5f5799a932462, 0x9e0082df0ba9e4b0, 0x7a5dbbc594ddb9f3, 0xf4b32f46226bada7,
+		0x751e8fbc860ee5fb, 0x14ea5627c0843d90, 0xf723ca908e7af2ee, 0xa129ca6149be45e5,
+		0x3f2acc7f57c29bdb, 0x699ae9f52cbe4794, 0x4bc1b3f0968dd39c, 0xbb6dc91da77961bd,
+		0xbed65cf21aa2ee98, 0xd0f2cbb02e3b67c7, 0x93536795e3a33e88, 0xa80c038ccd5ccec8,
+		0xb8ad50c6f649af94, 0xbce192de8a85b8ea, 0x17d835b85bbb15f3, 0x2f2e6163076bcfad,
+		0xde4daaaca71dc9a5, 0xa6a2506687956571, 0xad87a3535c49ef28, 0x32d892fad841c342,
+		0x7127512f72f27cce, 0xa7f32346f95978e3, 0x12e0b01abb051238, 0x15e034d40fa197ae,
+		0x314dffbe0815a3b4, 0x027990f029623981, 0xcadcd4e59ef40c4d, 0x9abfd8766a33735c,
+		0x0e3ea96b5304a7d0, 0xad0c42d6fc585992, 0x187306c89bc215a9, 0xd4a60abcf3792b95,
+		0xf935451de4f21df2, 0xa9538f0419755787, 0xdb9acddff56ca510, 0xd06c98cd5c0975eb,
+		0xe612a3cb9ecba951, 0xc766e62cfcadaf96, 0xee64435a9752fe72, 0xa192d576b245165a,
+		0x0a8787bf8ecb74b2, 0x81b3e73d20b49b6f, 0x7fa8220ba3b2ecea, 0x245731c13ca42499,
+		0xb78dbfaf3a8d83bd, 0xea1ad565322a1a0b, 0x60e61c23a3795013, 0x6606d7e446282b93,
+		0x6ca4ecb15c5f91e1, 0x9f626da15c9625f3, 0xe51b38608ef25f57, 0x958a324ceb064572,
+	}
+
+	key: [16]byte
+	for i in 0 ..< 16 {
+		key[i] = byte(i)
+	}
+
+	for i in 0 ..< len(test_vectors) {
+		data := make([]byte, i)
+		for j in 0 ..< i {
+			data[j] = byte(j)
+		}
+
+		vector := test_vectors[i]
+		computed := siphash.sum_2_4(data[:], key[:])
+
+		expect(
+			t,
+			computed == vector,
+			fmt.tprintf(
+				"Expected: 0x%x for input of %v, but got 0x%x instead",
+				vector,
+				data,
+				computed,
+			),
+		)
+	}
+}
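
The hmac package mirrors the hash package's split between one-shot and
incremental use. A minimal sketch grounded in the calls above (hmac.sum and
hmac.init/update/final), reusing the RFC 6234 "Jefe" vector from the test:

package crypto_hmac_example

import "core:crypto/hash"
import "core:crypto/hmac"
import "core:encoding/hex"
import "core:fmt"

main :: proc() {
	key_str := "Jefe"
	msg_str := "what do ya want for nothing?"
	key := transmute([]byte)(key_str)
	msg := transmute([]byte)(msg_str)

	tag: [32]byte // SHA-256 tag size.

	// One-shot.
	hmac.sum(hash.Algorithm.SHA256, tag[:], msg, key)
	fmt.println(string(hex.encode(tag[:], context.temp_allocator)))

	// Incremental; must produce the same tag.
	ctx: hmac.Context
	hmac.init(&ctx, hash.Algorithm.SHA256, key)
	hmac.update(&ctx, msg)
	hmac.final(&ctx, tag[:])
	fmt.println(string(hex.encode(tag[:], context.temp_allocator)))
}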

+ 0 - 541
tests/core/crypto/test_core_crypto_modern.odin

@@ -1,541 +0,0 @@
-package test_core_crypto
-
-import "core:testing"
-import "core:fmt"
-import "core:mem"
-import "core:time"
-import "core:crypto"
-
-import "core:crypto/chacha20"
-import "core:crypto/chacha20poly1305"
-import "core:crypto/poly1305"
-import "core:crypto/x25519"
-
-_digit_value :: proc(r: rune) -> int {
-	ri := int(r)
-	v: int = 16
-	switch r {
-	case '0'..='9': v = ri-'0'
-	case 'a'..='z': v = ri-'a'+10
-	case 'A'..='Z': v = ri-'A'+10
-	}
-	return v
-}
-
-_decode_hex32 :: proc(s: string) -> [32]byte{
-	b: [32]byte
-	for i := 0; i < len(s); i = i + 2 {
-		hi := _digit_value(rune(s[i]))
-		lo := _digit_value(rune(s[i+1]))
-		b[i/2] = byte(hi << 4 | lo)
-	}
-	return b
-}
-
-_PLAINTEXT_SUNSCREEN_STR := "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."
-
-@(test)
-test_chacha20 :: proc(t: ^testing.T) {
-	log(t, "Testing (X)ChaCha20")
-
-	// Test cases taken from RFC 8439, and draft-irtf-cfrg-xchacha-03
-	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
-
-	key := [chacha20.KEY_SIZE]byte{
-		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
-		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
-		0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
-		0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
-	}
-
-	nonce := [chacha20.NONCE_SIZE]byte{
-		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a,
-		0x00, 0x00, 0x00, 0x00,
-	}
-
-	ciphertext := [114]byte{
-		0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
-		0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
-		0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
-		0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b,
-		0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab,
-		0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57,
-		0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab,
-		0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8,
-		0x07, 0xca, 0x0d, 0xbf, 0x50, 0x0d, 0x6a, 0x61,
-		0x56, 0xa3, 0x8e, 0x08, 0x8a, 0x22, 0xb6, 0x5e,
-		0x52, 0xbc, 0x51, 0x4d, 0x16, 0xcc, 0xf8, 0x06,
-		0x81, 0x8c, 0xe9, 0x1a, 0xb7, 0x79, 0x37, 0x36,
-		0x5a, 0xf9, 0x0b, 0xbf, 0x74, 0xa3, 0x5b, 0xe6,
-		0xb4, 0x0b, 0x8e, 0xed, 0xf2, 0x78, 0x5e, 0x42,
-		0x87, 0x4d,
-	}
-	ciphertext_str := hex_string(ciphertext[:])
-
-	derived_ciphertext: [114]byte
-	ctx: chacha20.Context = ---
-	chacha20.init(&ctx, key[:], nonce[:])
-	chacha20.seek(&ctx, 1) // The test vectors start the counter at 1.
-	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
-
-	derived_ciphertext_str := hex_string(derived_ciphertext[:])
-	expect(t, derived_ciphertext_str == ciphertext_str, fmt.tprintf("Expected %s for xor_bytes(plaintext_str), but got %s instead", ciphertext_str, derived_ciphertext_str))
-
-	xkey := [chacha20.KEY_SIZE]byte{
-		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
-		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
-		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
-		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
-	}
-
-	xnonce := [chacha20.XNONCE_SIZE]byte{
-		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
-		0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
-		0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
-	}
-
-	xciphertext := [114]byte{
-		0xbd, 0x6d, 0x17, 0x9d, 0x3e, 0x83, 0xd4, 0x3b,
-		0x95, 0x76, 0x57, 0x94, 0x93, 0xc0, 0xe9, 0x39,
-		0x57, 0x2a, 0x17, 0x00, 0x25, 0x2b, 0xfa, 0xcc,
-		0xbe, 0xd2, 0x90, 0x2c, 0x21, 0x39, 0x6c, 0xbb,
-		0x73, 0x1c, 0x7f, 0x1b, 0x0b, 0x4a, 0xa6, 0x44,
-		0x0b, 0xf3, 0xa8, 0x2f, 0x4e, 0xda, 0x7e, 0x39,
-		0xae, 0x64, 0xc6, 0x70, 0x8c, 0x54, 0xc2, 0x16,
-		0xcb, 0x96, 0xb7, 0x2e, 0x12, 0x13, 0xb4, 0x52,
-		0x2f, 0x8c, 0x9b, 0xa4, 0x0d, 0xb5, 0xd9, 0x45,
-		0xb1, 0x1b, 0x69, 0xb9, 0x82, 0xc1, 0xbb, 0x9e,
-		0x3f, 0x3f, 0xac, 0x2b, 0xc3, 0x69, 0x48, 0x8f,
-		0x76, 0xb2, 0x38, 0x35, 0x65, 0xd3, 0xff, 0xf9,
-		0x21, 0xf9, 0x66, 0x4c, 0x97, 0x63, 0x7d, 0xa9,
-		0x76, 0x88, 0x12, 0xf6, 0x15, 0xc6, 0x8b, 0x13,
-		0xb5, 0x2e,
-	}
-	xciphertext_str := hex_string(xciphertext[:])
-
-	chacha20.init(&ctx, xkey[:], xnonce[:])
-	chacha20.seek(&ctx, 1)
-	chacha20.xor_bytes(&ctx, derived_ciphertext[:], plaintext[:])
-
-	derived_ciphertext_str = hex_string(derived_ciphertext[:])
-	expect(t, derived_ciphertext_str == xciphertext_str, fmt.tprintf("Expected %s for xor_bytes(plaintext_str), but got %s instead", xciphertext_str, derived_ciphertext_str))
-}
-
-@(test)
-test_poly1305 :: proc(t: ^testing.T) {
-	log(t, "Testing poly1305")
-
-	// Test cases taken from poly1305-donna.
-	key := [poly1305.KEY_SIZE]byte{
-		0xee,0xa6,0xa7,0x25,0x1c,0x1e,0x72,0x91,
-		0x6d,0x11,0xc2,0xcb,0x21,0x4d,0x3c,0x25,
-		0x25,0x39,0x12,0x1d,0x8e,0x23,0x4e,0x65,
-		0x2d,0x65,0x1f,0xa4,0xc8,0xcf,0xf8,0x80,
-	}
-
-	msg := [131]byte{
-		0x8e,0x99,0x3b,0x9f,0x48,0x68,0x12,0x73,
-		0xc2,0x96,0x50,0xba,0x32,0xfc,0x76,0xce,
-		0x48,0x33,0x2e,0xa7,0x16,0x4d,0x96,0xa4,
-		0x47,0x6f,0xb8,0xc5,0x31,0xa1,0x18,0x6a,
-		0xc0,0xdf,0xc1,0x7c,0x98,0xdc,0xe8,0x7b,
-		0x4d,0xa7,0xf0,0x11,0xec,0x48,0xc9,0x72,
-		0x71,0xd2,0xc2,0x0f,0x9b,0x92,0x8f,0xe2,
-		0x27,0x0d,0x6f,0xb8,0x63,0xd5,0x17,0x38,
-		0xb4,0x8e,0xee,0xe3,0x14,0xa7,0xcc,0x8a,
-		0xb9,0x32,0x16,0x45,0x48,0xe5,0x26,0xae,
-		0x90,0x22,0x43,0x68,0x51,0x7a,0xcf,0xea,
-		0xbd,0x6b,0xb3,0x73,0x2b,0xc0,0xe9,0xda,
-		0x99,0x83,0x2b,0x61,0xca,0x01,0xb6,0xde,
-		0x56,0x24,0x4a,0x9e,0x88,0xd5,0xf9,0xb3,
-		0x79,0x73,0xf6,0x22,0xa4,0x3d,0x14,0xa6,
-		0x59,0x9b,0x1f,0x65,0x4c,0xb4,0x5a,0x74,
-		0xe3,0x55,0xa5,
-	}
-
-	tag := [poly1305.TAG_SIZE]byte{
-		0xf3,0xff,0xc7,0x70,0x3f,0x94,0x00,0xe5,
-		0x2a,0x7d,0xfb,0x4b,0x3d,0x33,0x05,0xd9,
-	}
-	tag_str := hex_string(tag[:])
-
-	// Verify - oneshot + compare
-	ok := poly1305.verify(tag[:], msg[:], key[:])
-	expect(t, ok, "oneshot verify call failed")
-
-	// Sum - oneshot
-	derived_tag: [poly1305.TAG_SIZE]byte
-	poly1305.sum(derived_tag[:], msg[:], key[:])
-	derived_tag_str := hex_string(derived_tag[:])
-	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected %s for sum(msg, key), but got %s instead", tag_str, derived_tag_str))
-
-	// Incremental
-	mem.zero(&derived_tag, size_of(derived_tag))
-	ctx: poly1305.Context = ---
-	poly1305.init(&ctx, key[:])
-	read_lengths := [11]int{32, 64, 16, 8, 4, 2, 1, 1, 1, 1, 1}
-	off := 0
-	for read_length in read_lengths {
-		to_read := msg[off:off+read_length]
-		poly1305.update(&ctx, to_read)
-		off = off + read_length
-	}
-	poly1305.final(&ctx, derived_tag[:])
-	derived_tag_str = hex_string(derived_tag[:])
-	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected %s for init/update/final - incremental, but got %s instead", tag_str, derived_tag_str))
-}
-
-@(test)
-test_chacha20poly1305 :: proc(t: ^testing.T) {
-	log(t, "Testing chacha20poly1205")
-
-	plaintext := transmute([]byte)(_PLAINTEXT_SUNSCREEN_STR)
-
-	aad := [12]byte{
-		0x50, 0x51, 0x52, 0x53, 0xc0, 0xc1, 0xc2, 0xc3,
-		0xc4, 0xc5, 0xc6, 0xc7,
-	}
-
-	key := [chacha20poly1305.KEY_SIZE]byte{
-		0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
-		0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
-		0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
-		0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
-	}
-
-	nonce := [chacha20poly1305.NONCE_SIZE]byte{
-		0x07, 0x00, 0x00, 0x00,
-		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
-	}
-
-	ciphertext := [114]byte{
-		0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
-		0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
-		0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
-		0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
-		0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
-		0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
-		0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
-		0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
-		0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
-		0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
-		0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
-		0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
-		0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
-		0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
-		0x61, 0x16,
-	}
-	ciphertext_str := hex_string(ciphertext[:])
-
-	tag := [chacha20poly1305.TAG_SIZE]byte{
-		0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
-		0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91,
-	}
-	tag_str := hex_string(tag[:])
-
-	derived_tag: [chacha20poly1305.TAG_SIZE]byte
-	derived_ciphertext: [114]byte
-
-	chacha20poly1305.encrypt(derived_ciphertext[:], derived_tag[:], key[:], nonce[:], aad[:], plaintext)
-
-	derived_ciphertext_str := hex_string(derived_ciphertext[:])
-	expect(t, derived_ciphertext_str == ciphertext_str, fmt.tprintf("Expected ciphertext %s for encrypt(aad, plaintext), but got %s instead", ciphertext_str, derived_ciphertext_str))
-
-	derived_tag_str := hex_string(derived_tag[:])
-	expect(t, derived_tag_str == tag_str, fmt.tprintf("Expected tag %s for encrypt(aad, plaintext), but got %s instead", tag_str, derived_tag_str))
-
-	derived_plaintext: [114]byte
-	ok := chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], ciphertext[:])
-	derived_plaintext_str := string(derived_plaintext[:])
-	expect(t, ok, "Expected true for decrypt(tag, aad, ciphertext)")
-	expect(t, derived_plaintext_str == _PLAINTEXT_SUNSCREEN_STR, fmt.tprintf("Expected plaintext %s for decrypt(tag, aad, ciphertext), but got %s instead", _PLAINTEXT_SUNSCREEN_STR, derived_plaintext_str))
-
-	derived_ciphertext[0] ~= 0xa5
-	ok = chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], derived_ciphertext[:])
-	expect(t, !ok, "Expected false for decrypt(tag, aad, corrupted_ciphertext)")
-
-	aad[0] ~= 0xa5
-	ok = chacha20poly1305.decrypt(derived_plaintext[:], tag[:], key[:], nonce[:], aad[:], ciphertext[:])
-	expect(t, !ok, "Expected false for decrypt(tag, corrupted_aad, ciphertext)")
-}
-
-TestECDH :: struct {
-	scalar:  string,
-	point:   string,
-	product: string,
-}
-
-@(test)
-test_x25519 :: proc(t: ^testing.T) {
-	log(t, "Testing X25519")
-
-	// Local copy of this so that the base point doesn't need to be exported.
-	_BASE_POINT: [32]byte = {
-		9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-	}
-
-	test_vectors := [?]TestECDH {
-		// Test vectors from RFC 7748
-		TestECDH{
-			"a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4",
-			"e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c",
-			"c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552",
-		},
-		TestECDH{
-			"4b66e9d4d1b4673c5ad22691957d6af5c11b6421e0ea01d42ca4169e7918ba0d",
-			"e5210f12786811d3f4b7959d0538ae2c31dbe7106fc03c3efc4cd549c715a493",
-			"95cbde9476e8907d7aade45cb4b873f88b595a68799fa152e6f8f7647aac7957",
-		},
-	}
-	for v, _ in test_vectors {
-		scalar := _decode_hex32(v.scalar)
-		point := _decode_hex32(v.point)
-
-		derived_point: [x25519.POINT_SIZE]byte
-		x25519.scalarmult(derived_point[:], scalar[:], point[:])
-		derived_point_str := hex_string(derived_point[:])
-
-		expect(t, derived_point_str == v.product, fmt.tprintf("Expected %s for %s * %s, but got %s instead", v.product, v.scalar, v.point, derived_point_str))
-
-		// Abuse the test vectors to sanity-check the scalar-basepoint multiply.
-		p1, p2: [x25519.POINT_SIZE]byte
-		x25519.scalarmult_basepoint(p1[:], scalar[:])
-		x25519.scalarmult(p2[:], scalar[:], _BASE_POINT[:])
-		p1_str, p2_str := hex_string(p1[:]), hex_string(p2[:])
-		expect(t, p1_str == p2_str, fmt.tprintf("Expected %s for %s * basepoint, but got %s instead", p2_str, v.scalar, p1_str))
-	}
-
-    // TODO/tests: Run the wycheproof test vectors, once I figure out
-    // how to work with JSON.
-}
-
-@(test)
-test_rand_bytes :: proc(t: ^testing.T) {
-	log(t, "Testing rand_bytes")
-
-	if ODIN_OS != .Linux {
-		log(t, "rand_bytes not supported - skipping")
-		return
-	}
-
-	allocator := context.allocator
-
-	buf := make([]byte, 1 << 25, allocator)
-	defer delete(buf)
-
-	// Testing a CSPRNG for correctness is incredibly involved and
-	// beyond the scope of an implementation that offloads
-	// responsibility for correctness to the OS.
-	//
-	// Just attempt to randomize a sufficiently large buffer, where
-	// sufficiently large is:
-	//  * Larger than the maximum getentropy request size (256 bytes).
-	//  * Larger than the maximum getrandom request size (2^25 - 1 bytes).
-	//
-	// While theoretically non-deterministic, if this fails, chances
-	// are the CSPRNG is busted.
-	seems_ok := false
-	for i := 0; i < 256; i = i + 1 {
-		mem.zero_explicit(raw_data(buf), len(buf))
-		crypto.rand_bytes(buf)
-
-		if buf[0] != 0 && buf[len(buf)-1] != 0 {
-			seems_ok = true
-			break
-		}
-	}
-
-	expect(t, seems_ok, "Expected to randomize the head and tail of the buffer within a handful of attempts")
-}
-
-@(test)
-bench_modern :: proc(t: ^testing.T) {
-	fmt.println("Starting benchmarks:")
-
-	bench_chacha20(t)
-	bench_poly1305(t)
-	bench_chacha20poly1305(t)
-	bench_x25519(t)
-}
-
-_setup_sized_buf :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
-	assert(options != nil)
-
-	options.input = make([]u8, options.bytes, allocator)
-	return nil if len(options.input) == options.bytes else .Allocation_Error
-}
-
-_teardown_sized_buf :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
-	assert(options != nil)
-
-	delete(options.input)
-	return nil
-}
-
-_benchmark_chacha20 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
-	buf := options.input
-	key := [chacha20.KEY_SIZE]byte{
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-	}
-	nonce := [chacha20.NONCE_SIZE]byte{
-		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-		0x00, 0x00, 0x00, 0x00,
-	}
-
-	ctx: chacha20.Context = ---
-	chacha20.init(&ctx, key[:], nonce[:])
-
-	for _ in 0..=options.rounds {
-		chacha20.xor_bytes(&ctx, buf, buf)
-	}
-	options.count     = options.rounds
-	options.processed = options.rounds * options.bytes
-	return nil
-}
-
-_benchmark_poly1305 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
-	buf := options.input
-	key := [poly1305.KEY_SIZE]byte{
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-	}
-
-	tag: [poly1305.TAG_SIZE]byte = ---
-	for _ in 0..=options.rounds {
-		poly1305.sum(tag[:], buf, key[:])
-	}
-	options.count     = options.rounds
-	options.processed = options.rounds * options.bytes
-	//options.hash      = u128(h)
-	return nil
-}
-
-_benchmark_chacha20poly1305 :: proc(options: ^time.Benchmark_Options, allocator := context.allocator) -> (err: time.Benchmark_Error) {
-	buf := options.input
-	key := [chacha20.KEY_SIZE]byte{
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
-	}
-	nonce := [chacha20.NONCE_SIZE]byte{
-		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-		0x00, 0x00, 0x00, 0x00,
-	}
-
-	tag: [chacha20poly1305.TAG_SIZE]byte = ---
-
-	for _ in 0..=options.rounds {
-		chacha20poly1305.encrypt(buf,tag[:], key[:], nonce[:], nil, buf)
-	}
-	options.count     = options.rounds
-	options.processed = options.rounds * options.bytes
-	return nil
-}
-
-benchmark_print :: proc(name: string, options: ^time.Benchmark_Options) {
-	fmt.printf("\t[%v] %v rounds, %v bytes processed in %v ns\n\t\t%5.3f rounds/s, %5.3f MiB/s\n",
-		name,
-		options.rounds,
-		options.processed,
-		time.duration_nanoseconds(options.duration),
-		options.rounds_per_second,
-		options.megabytes_per_second,
-	)
-}
-
-bench_chacha20 :: proc(t: ^testing.T) {
-	name    := "ChaCha20 64 bytes"
-	options := &time.Benchmark_Options{
-		rounds   = 1_000,
-		bytes    = 64,
-		setup    = _setup_sized_buf,
-		bench    = _benchmark_chacha20,
-		teardown = _teardown_sized_buf,
-	}
-
-	err  := time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-
-	name = "ChaCha20 1024 bytes"
-	options.bytes = 1024
-	err = time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-
-	name = "ChaCha20 65536 bytes"
-	options.bytes = 65536
-	err = time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-}
-
-bench_poly1305 :: proc(t: ^testing.T) {
-	name    := "Poly1305 64 zero bytes"
-	options := &time.Benchmark_Options{
-		rounds   = 1_000,
-		bytes    = 64,
-		setup    = _setup_sized_buf,
-		bench    = _benchmark_poly1305,
-		teardown = _teardown_sized_buf,
-	}
-
-	err  := time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-
-	name = "Poly1305 1024 zero bytes"
-	options.bytes = 1024
-	err = time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-}
-
-bench_chacha20poly1305 :: proc(t: ^testing.T) {
-	name    := "chacha20poly1305 64 bytes"
-	options := &time.Benchmark_Options{
-		rounds   = 1_000,
-		bytes    = 64,
-		setup    = _setup_sized_buf,
-		bench    = _benchmark_chacha20poly1305,
-		teardown = _teardown_sized_buf,
-	}
-
-	err  := time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-
-	name = "chacha20poly1305 1024 bytes"
-	options.bytes = 1024
-	err = time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-
-	name = "chacha20poly1305 65536 bytes"
-	options.bytes = 65536
-	err = time.benchmark(options, context.allocator)
-	expect(t, err == nil, name)
-	benchmark_print(name, options)
-}
-
-bench_x25519 :: proc(t: ^testing.T) {
-	point := _decode_hex32("deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef")
-	scalar := _decode_hex32("cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe")
-	out: [x25519.POINT_SIZE]byte = ---
-
-	iters :: 10000
-	start := time.now()
-	for i := 0; i < iters; i = i + 1 {
-		x25519.scalarmult(out[:], scalar[:], point[:])
-	}
-	elapsed := time.since(start)
-
-	log(t, fmt.tprintf("x25519.scalarmult: ~%f us/op", time.duration_microseconds(elapsed) / iters))
-}
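
The (X)ChaCha20, Poly1305, and X25519 coverage deleted here moves into the
reworked crypto test files; the underlying APIs are unchanged. For reference,
a minimal X25519 key-exchange sketch using only the scalarmult and
scalarmult_basepoint calls seen above (the secrets are placeholders; real code
must draw them from a CSPRNG):

package crypto_x25519_example

import "core:crypto/x25519"
import "core:fmt"

main :: proc() {
	// Placeholder secrets for illustration only.
	alice_sk, bob_sk: [32]byte
	alice_sk[0] = 0xa5
	bob_sk[0] = 0x5a

	// Each party publishes scalar * basepoint.
	alice_pk, bob_pk: [x25519.POINT_SIZE]byte
	x25519.scalarmult_basepoint(alice_pk[:], alice_sk[:])
	x25519.scalarmult_basepoint(bob_pk[:], bob_sk[:])

	// Each side combines its own secret with the peer's public point.
	shared_a, shared_b: [x25519.POINT_SIZE]byte
	x25519.scalarmult(shared_a[:], alice_sk[:], bob_pk[:])
	x25519.scalarmult(shared_b[:], bob_sk[:], alice_pk[:])

	fmt.println(shared_a == shared_b) // true: both derive the same secret
}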

+ 236 - 0
tests/core/crypto/test_crypto_benchmark.odin

@@ -0,0 +1,236 @@
+package test_core_crypto
+
+import "core:encoding/hex"
+import "core:fmt"
+import "core:testing"
+import "core:time"
+
+import "core:crypto/chacha20"
+import "core:crypto/chacha20poly1305"
+import "core:crypto/poly1305"
+import "core:crypto/x25519"
+
+// Cryptographic primitive benchmarks.
+
+@(test)
+bench_crypto :: proc(t: ^testing.T) {
+	fmt.println("Starting benchmarks:")
+
+	bench_chacha20(t)
+	bench_poly1305(t)
+	bench_chacha20poly1305(t)
+	bench_x25519(t)
+}
+
+_setup_sized_buf :: proc(
+	options: ^time.Benchmark_Options,
+	allocator := context.allocator,
+) -> (
+	err: time.Benchmark_Error,
+) {
+	assert(options != nil)
+
+	options.input = make([]u8, options.bytes, allocator)
+	return nil if len(options.input) == options.bytes else .Allocation_Error
+}
+
+_teardown_sized_buf :: proc(
+	options: ^time.Benchmark_Options,
+	allocator := context.allocator,
+) -> (
+	err: time.Benchmark_Error,
+) {
+	assert(options != nil)
+
+	delete(options.input)
+	return nil
+}
+
+_benchmark_chacha20 :: proc(
+	options: ^time.Benchmark_Options,
+	allocator := context.allocator,
+) -> (
+	err: time.Benchmark_Error,
+) {
+	buf := options.input
+	key := [chacha20.KEY_SIZE]byte {
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+	}
+	nonce := [chacha20.NONCE_SIZE]byte {
+		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+		0x00, 0x00, 0x00, 0x00,
+	}
+
+	ctx: chacha20.Context = ---
+	chacha20.init(&ctx, key[:], nonce[:])
+
+	for _ in 0 ..= options.rounds {
+		chacha20.xor_bytes(&ctx, buf, buf)
+	}
+	options.count = options.rounds
+	options.processed = options.rounds * options.bytes
+	return nil
+}
+
+_benchmark_poly1305 :: proc(
+	options: ^time.Benchmark_Options,
+	allocator := context.allocator,
+) -> (
+	err: time.Benchmark_Error,
+) {
+	buf := options.input
+	key := [poly1305.KEY_SIZE]byte {
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+	}
+
+	tag: [poly1305.TAG_SIZE]byte = ---
+	for _ in 0 ..= options.rounds {
+		poly1305.sum(tag[:], buf, key[:])
+	}
+	options.count = options.rounds
+	options.processed = options.rounds * options.bytes
+	//options.hash      = u128(h)
+	return nil
+}
+
+_benchmark_chacha20poly1305 :: proc(
+	options: ^time.Benchmark_Options,
+	allocator := context.allocator,
+) -> (
+	err: time.Benchmark_Error,
+) {
+	buf := options.input
+	key := [chacha20.KEY_SIZE]byte {
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+		0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+	}
+	nonce := [chacha20.NONCE_SIZE]byte {
+		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+		0x00, 0x00, 0x00, 0x00,
+	}
+
+	tag: [chacha20poly1305.TAG_SIZE]byte = ---
+
+	for _ in 0 ..= options.rounds {
+		chacha20poly1305.encrypt(buf, tag[:], key[:], nonce[:], nil, buf)
+	}
+	options.count = options.rounds
+	options.processed = options.rounds * options.bytes
+	return nil
+}
+
+benchmark_print :: proc(name: string, options: ^time.Benchmark_Options) {
+	fmt.printf(
+		"\t[%v] %v rounds, %v bytes processed in %v ns\n\t\t%5.3f rounds/s, %5.3f MiB/s\n",
+		name,
+		options.rounds,
+		options.processed,
+		time.duration_nanoseconds(options.duration),
+		options.rounds_per_second,
+		options.megabytes_per_second,
+	)
+}
+
+bench_chacha20 :: proc(t: ^testing.T) {
+	name := "ChaCha20 64 bytes"
+	options := &time.Benchmark_Options {
+		rounds = 1_000,
+		bytes = 64,
+		setup = _setup_sized_buf,
+		bench = _benchmark_chacha20,
+		teardown = _teardown_sized_buf,
+	}
+
+	err := time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+
+	name = "ChaCha20 1024 bytes"
+	options.bytes = 1024
+	err = time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+
+	name = "ChaCha20 65536 bytes"
+	options.bytes = 65536
+	err = time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+}
+
+bench_poly1305 :: proc(t: ^testing.T) {
+	name := "Poly1305 64 zero bytes"
+	options := &time.Benchmark_Options {
+		rounds = 1_000,
+		bytes = 64,
+		setup = _setup_sized_buf,
+		bench = _benchmark_poly1305,
+		teardown = _teardown_sized_buf,
+	}
+
+	err := time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+
+	name = "Poly1305 1024 zero bytes"
+	options.bytes = 1024
+	err = time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+}
+
+bench_chacha20poly1305 :: proc(t: ^testing.T) {
+	name := "chacha20poly1305 64 bytes"
+	options := &time.Benchmark_Options {
+		rounds = 1_000,
+		bytes = 64,
+		setup = _setup_sized_buf,
+		bench = _benchmark_chacha20poly1305,
+		teardown = _teardown_sized_buf,
+	}
+
+	err := time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+
+	name = "chacha20poly1305 1024 bytes"
+	options.bytes = 1024
+	err = time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+
+	name = "chacha20poly1305 65536 bytes"
+	options.bytes = 65536
+	err = time.benchmark(options, context.allocator)
+	expect(t, err == nil, name)
+	benchmark_print(name, options)
+}
+
+bench_x25519 :: proc(t: ^testing.T) {
+	point_str := "deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"
+	scalar_str := "cafebabecafebabecafebabecafebabecafebabecafebabecafebabecafebabe"
+
+	point, _ := hex.decode(transmute([]byte)(point_str), context.temp_allocator)
+	scalar, _ := hex.decode(transmute([]byte)(scalar_str), context.temp_allocator)
+	out: [x25519.POINT_SIZE]byte = ---
+
+	iters :: 10000
+	start := time.now()
+	for i := 0; i < iters; i = i + 1 {
+		x25519.scalarmult(out[:], scalar[:], point[:])
+	}
+	elapsed := time.since(start)
+
+	log(
+		t,
+		fmt.tprintf("x25519.scalarmult: ~%f us/op", time.duration_microseconds(elapsed) / iters),
+	)
+}
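
Since the chacha20poly1305 benchmark passes nil for the AAD, it is worth
showing the full AEAD shape. A minimal round-trip sketch using the encrypt
signature benchmarked above and the matching decrypt seen in the deleted tests
(all-zero key and nonce for illustration only; never reuse a key/nonce pair in
practice):

package crypto_aead_example

import "core:crypto/chacha20poly1305"
import "core:fmt"

main :: proc() {
	key: [chacha20poly1305.KEY_SIZE]byte     // all-zero, illustration only
	nonce: [chacha20poly1305.NONCE_SIZE]byte // all-zero, illustration only

	msg_str := "attack at dawn"
	plaintext := transmute([]byte)(msg_str)

	ciphertext := make([]byte, len(plaintext))
	defer delete(ciphertext)
	tag: [chacha20poly1305.TAG_SIZE]byte

	chacha20poly1305.encrypt(ciphertext, tag[:], key[:], nonce[:], nil, plaintext)

	recovered := make([]byte, len(ciphertext))
	defer delete(recovered)
	ok := chacha20poly1305.decrypt(recovered, tag[:], key[:], nonce[:], nil, ciphertext)
	fmt.println(ok, string(recovered)) // true attack at dawn
}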

+ 1 - 2
tests/core/encoding/hxa/test_core_hxa.odin

@@ -150,6 +150,7 @@ test_write :: proc(t: ^testing.T) {
 
 
 	required_size := hxa.required_write_size(w_file)
 	buf := make([]u8, required_size)
+	defer delete(buf)
 
 	n, write_err := hxa.write(buf, w_file)
 	write_e :: hxa.Write_Error.None
@@ -161,8 +162,6 @@ test_write :: proc(t: ^testing.T) {
 	tc.expect(t, read_err == read_e, fmt.tprintf("%v: read_err %v != %v", #procedure, read_err, read_e))
 	defer hxa.file_destroy(file)
 
-	delete(buf)
-
 	tc.expect(t, file.magic_number == 0x417848, fmt.tprintf("%v: file.magic_number %v != %v",
 															#procedure, file.magic_number, 0x417848))
 	tc.expect(t, file.version == 3, fmt.tprintf("%v: file.version %v != %v", #procedure, file.version, 3))

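Moving the delete(buf) up to a defer immediately after the allocation is the
idiomatic Odin pattern: the cleanup then runs on every exit path, so later
early returns or reordering cannot leak the buffer. In miniature:

	buf := make([]u8, required_size)
	defer delete(buf) // freed on every return path
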
+ 10 - 2
tests/internal/test_pow.odin

@@ -31,8 +31,16 @@ pow_test :: proc(t: ^testing.T) {
 		{
 			v1 := math.pow(2, f16(exp))
 			v2 := math.pow2_f16(exp)
-			_v1 := transmute(u16)v1
 			_v2 := transmute(u16)v2
+			_v1 := transmute(u16)v1
+
+			when ODIN_OS == .Darwin && ODIN_ARCH == .arm64 {
+				if exp == -25 {
+					testing.logf(t, "skipping known test failure on darwin+arm64, Expected math.pow2_f16(-25) == math.pow(2, -25) (= 0000), got 0001")
+					_v2 = 0
+				}
+			}
+
 			expect(t, _v1 == _v2, fmt.tprintf("Expected math.pow2_f16(%d) == math.pow(2, %d) (= %04x), got %04x", exp, exp, _v1, _v2))
 		}
 	}
@@ -70,4 +78,4 @@ when ODIN_TEST {
 		fmt.printf("[%v] ", loc)
 		fmt.printf("log: %v\n", v)
 	}
-}
+}

+ 0 - 2
vendor/OpenGL/helpers.odin

@@ -120,7 +120,6 @@ when GL_DEBUG {
 }
 
 // Compiling shaders are identical for any shader (vertex, geometry, fragment, tesselation, (maybe compute too))
-@private
 compile_shader_from_source :: proc(shader_data: string, shader_type: Shader_Type) -> (shader_id: u32, ok: bool) {
 	shader_id = CreateShader(cast(u32)shader_type)
 	length := i32(len(shader_data))
@@ -134,7 +133,6 @@ compile_shader_from_source :: proc(shader_data: string, shader_type: Shader_Type
 }
 
 // only used once, but I'd just make a subprocedure(?) for consistency
-@private
 create_and_link_program :: proc(shader_ids: []u32, binary_retrievable := false) -> (program_id: u32, ok: bool) {
 	program_id = CreateProgram()
 	for id in shader_ids {

+ 3 - 3
vendor/raylib/raylib.odin

@@ -344,7 +344,7 @@ Camera2D :: struct {
 	zoom:     f32,                // Camera zoom (scaling), should be 1.0f by default
 }
 
-// Vertex data definning a mesh
+// Vertex data defining a mesh
 // NOTE: Data stored in CPU memory (and GPU)
 Mesh :: struct {
 	vertexCount:   c.int,         // Number of vertices stored in arrays
@@ -1051,8 +1051,8 @@ foreign lib {
 	LoadShader              :: proc(vsFileName, fsFileName: cstring) -> Shader ---                                                        // Load shader from files and bind default locations
 	LoadShaderFromMemory    :: proc(vsCode, fsCode: cstring) -> Shader ---                                                                // Load shader from code strings and bind default locations
 	IsShaderReady           :: proc(shader: Shader) -> bool ---                                                                           // Check if a shader is ready
-	GetShaderLocation       :: proc(shader: Shader, uniformName: cstring) -> c.int ---                                                    // Get shader uniform location
-	GetShaderLocationAttrib :: proc(shader: Shader, attribName: cstring) -> c.int ---                                                     // Get shader attribute location
+	GetShaderLocation       :: proc(shader: Shader, uniformName: cstring) -> ShaderLocationIndex ---                                                    // Get shader uniform location
+	GetShaderLocationAttrib :: proc(shader: Shader, attribName: cstring)  -> ShaderLocationIndex ---                                                    // Get shader attribute location
 	SetShaderValue          :: proc(shader: Shader, locIndex: ShaderLocationIndex, value: rawptr, uniformType: ShaderUniformDataType) ---               // Set shader uniform value
 	SetShaderValueV         :: proc(shader: Shader, locIndex: ShaderLocationIndex, value: rawptr, uniformType: ShaderUniformDataType, count: c.int) --- // Set shader uniform value vector
 	SetShaderValueMatrix    :: proc(shader: Shader, locIndex: ShaderLocationIndex, mat: Matrix) ---                                                     // Set shader uniform value (matrix 4x4)

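With GetShaderLocation and GetShaderLocationAttrib now returning
ShaderLocationIndex instead of c.int, the result feeds straight into
SetShaderValue's locIndex parameter without a cast. A minimal sketch with a
hypothetical shader and uniform name (assuming the usual
ShaderUniformDataType members):

package raylib_shader_example

import rl "vendor:raylib"

main :: proc() {
	rl.InitWindow(320, 240, "shader location example")
	defer rl.CloseWindow()

	// Hypothetical shader files exposing a "tintColor" uniform.
	shader := rl.LoadShader("base.vs", "base.fs")
	defer rl.UnloadShader(shader)

	// Typed location: no cast needed when passing it back in.
	loc := rl.GetShaderLocation(shader, "tintColor")

	tint := rl.Vector4{1.0, 0.5, 0.0, 1.0}
	rl.SetShaderValue(shader, loc, &tint, .VEC4)
}
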
+ 818 - 0
vendor/raylib/raymath.odin

@@ -0,0 +1,818 @@
+package raylib
+
+import c "core:c/libc"
+import "core:math"
+import "core:math/linalg"
+
+EPSILON :: 0.000001
+
+
+//----------------------------------------------------------------------------------
+// Module Functions Definition - Utils math
+//----------------------------------------------------------------------------------
+
+
+// Clamp float value
+@(require_results)
+Clamp :: proc "c" (value: f32, min, max: f32) -> f32 {
+	return clamp(value, min, max)
+}
+
+// Calculate linear interpolation between two floats
+@(require_results)
+Lerp :: proc "c" (start, end: f32, amount: f32) -> f32 {
+	return start*(1-amount) + end*amount
+}
+
+// Normalize input value within input range
+@(require_results)
+Normalize :: proc "c" (value: f32, start, end: f32) -> f32 {
+	return (value - start) / (end - start)
+}
+
+// Remap input value within input range to output range
+@(require_results)
+Remap :: proc "c" (value: f32, inputStart, inputEnd: f32, outputStart, outputEnd: f32) -> f32 {
+	return (value - inputStart)/(inputEnd - inputStart)*(outputEnd - outputStart) + outputStart
+}
+
+// Wrap input value from min to max
+@(require_results)
+Wrap :: proc "c" (value: f32, min, max: f32) -> f32 {
+	return value - (max - min)*math.floor((value - min)/(max - min))
+}
+
+// Check whether two given floats are almost equal
+@(require_results)
+FloatEquals :: proc "c" (x, y: f32) -> bool {
+	return abs(x - y) <= EPSILON*c.fmaxf(1.0, c.fmaxf(abs(x), abs(y)))
+}
+
+
+
+//----------------------------------------------------------------------------------
+// Module Functions Definition - Vector2 math
+//----------------------------------------------------------------------------------
+
+
+// Vector with components value 0.0
+@(require_results, deprecated="Prefer Vector2(0)")
+Vector2Zero :: proc "c" () -> Vector2 {
+	return Vector2(0)
+}
+// Vector with components value 1.0
+@(require_results, deprecated="Prefer Vector2(1)")
+Vector2One :: proc "c" () -> Vector2 {
+	return Vector2(1)
+}
+// Add two vectors (v1 + v2)
+@(require_results, deprecated="Prefer v1 + v2")
+Vector2Add :: proc "c" (v1, v2: Vector2) -> Vector2 {
+	return v1 + v2
+}
+// Add vector and float value
+@(require_results, deprecated="Prefer v + value")
+Vector2AddValue :: proc "c" (v: Vector2, value: f32) -> Vector2 {
+	return v + value
+}
+// Subtract two vectors (v1 - v2)
+@(require_results, deprecated="Prefer a - b")
+Vector2Subtract :: proc "c" (a, b: Vector2) -> Vector2 {
+	return a - b
+}
+// Subtract vector by float value
+@(require_results, deprecated="Prefer v + value")
+Vector2SubtractValue :: proc "c" (v: Vector2, value: f32) -> Vector2 {
+	return v - value
+}
+// Calculate vector length
+@(require_results, deprecated="Prefer linalg.length(v)")
+Vector2Length :: proc "c" (v: Vector2) -> f32 {
+	return linalg.length(v)
+}
+// Calculate vector square length
+@(require_results, deprecated="Prefer linalg.length2(v)")
+Vector2LengthSqr :: proc "c" (v: Vector2) -> f32 {
+	return linalg.length2(v)
+}
+// Calculate two vectors dot product
+@(require_results, deprecated="Prefer linalg.dot(v1, v2)")
+Vector2DotProduct :: proc "c" (v1, v2: Vector2) -> f32 {
+	return linalg.dot(v1, v2)
+}
+// Calculate distance between two vectors
+@(require_results, deprecated="Prefer linalg.distance(v1, v2)")
+Vector2Distance :: proc "c" (v1, v2: Vector2) -> f32 {
+	return linalg.distance(v1, v2)
+}
+// Calculate square distance between two vectors
+@(require_results, deprecated="Prefer linalg.length2(v2-v1)")
+Vector2DistanceSqrt :: proc "c" (v1, v2: Vector2) -> f32 {
+	return linalg.length2(v2-v1)
+}
+// Calculate angle between two vectors
+// NOTE: Angle is calculated from origin point (0, 0)
+@(require_results, deprecated="Prefer linalg.angle_between(v1, v2)")
+Vector2Angle :: proc "c" (v1, v2: Vector2) -> f32 {
+	return linalg.angle_between(v1, v2)
+}
+
+// Calculate the angle defined by the line between two vectors
+// NOTE: Parameters need to be normalized
+// Current implementation should be aligned with glm::angle
+@(require_results)
+Vector2LineAngle :: proc "c" (start, end: Vector2) -> f32 {
+	// TODO(10/9/2023): Currently angles move clockwise, determine if this is wanted behavior
+	return -math.atan2(end.y - start.y, end.x - start.x)
+}
+
+// Scale vector (multiply by value)
+@(require_results, deprecated="Prefer v * scale")
+Vector2Scale :: proc "c" (v: Vector2, scale: f32) -> Vector2 {
+	return v * scale
+}
+// Multiply vector by vector
+@(require_results, deprecated="Prefer v1 * v2")
+Vector2Multiply :: proc "c" (v1, v2: Vector2) -> Vector2 {
+	return v1 * v2
+}
+// Negate vector
+@(require_results, deprecated="Prefer -v")
+Vector2Negate :: proc "c" (v: Vector2) -> Vector2 {
+	return -v
+}
+// Divide vector by vector
+@(require_results, deprecated="Prefer v1 / v2")
+Vector2Divide :: proc "c" (v1, v2: Vector2) -> Vector2 {
+	return v1 / v2
+}
+// Normalize provided vector
+@(require_results, deprecated="Prefer linalg.normalize0(v)")
+Vector2Normalize :: proc "c" (v: Vector2) -> Vector2 {
+	return linalg.normalize0(v)
+}
+// Transforms a Vector2 by a given Matrix
+@(require_results)
+Vector2Transform :: proc "c" (v: Vector2, m: Matrix) -> Vector2 {
+	v4 := Vector4{v.x, v.y, 0, 1} // w = 1 so the matrix translation applies, as in raymath
+	return (m * v4).xy
+}
+// Calculate linear interpolation between two vectors
+@(require_results, deprecated="Prefer = linalg.lerp(v1, v2, amount)")
+Vector2Lerp :: proc "c" (v1, v2: Vector2, amount: f32) -> Vector2 {
+	return linalg.lerp(v1, v2, amount)
+}
+// Calculate reflected vector to normal
+@(require_results, deprecated="Prefer = linalg.reflect(v, normal)")
+Vector2Reflect :: proc "c" (v, normal: Vector2) -> Vector2 {
+	return linalg.reflect(v, normal)
+}
+// Rotate vector by angle
+@(require_results)
+Vector2Rotate :: proc "c" (v: Vector2, angle: f32) -> Vector2 {
+	c, s := math.cos(angle), math.sin(angle)
+
+	return Vector2{
+		v.x*c - v.y*s,
+		v.x*s + v.y*c,
+	}
+}
+
+// Move Vector towards target
+@(require_results)
+Vector2MoveTowards :: proc "c" (v, target: Vector2, maxDistance: f32) -> Vector2 {
+	dv := target - v
+	value := linalg.dot(dv, dv)
+
+	if value == 0 || (maxDistance >= 0 && value <= maxDistance*maxDistance) {
+		return target
+	}
+
+	dist := math.sqrt(value)
+	return v + dv/dist*maxDistance
+}
+
+// Invert the given vector
+@(require_results, deprecated="Prefer 1.0/v")
+Vector2Invert :: proc "c" (v: Vector2) -> Vector2 {
+	return 1.0/v
+}
+
+// Clamp the components of the vector between
+// min and max values specified by the given vectors
+@(require_results)
+Vector2Clamp :: proc "c" (v: Vector2, min, max: Vector2) -> Vector2 {
+	return Vector2{
+		clamp(v.x, min.x, max.x),
+		clamp(v.y, min.y, max.y),
+	}
+}
+
+// Clamp the magnitude of the vector between two min and max values
+@(require_results)
+Vector2ClampValue :: proc "c" (v: Vector2, min, max: f32) -> Vector2 {
+	result := v
+
+	length := linalg.dot(v, v)
+	if length > 0 {
+		length = math.sqrt(length)
+		scale := f32(1)
+		if length < min {
+			scale = min/length
+		} else if length > max {
+			scale = max/length
+		}
+		result = v*scale
+	}
+	return result
+}
+
+@(require_results)
+Vector2Equals :: proc "c" (p, q: Vector2) -> bool {
+	return FloatEquals(p.x, q.x) &&
+	       FloatEquals(p.y, q.y)
+}
+
+
+
+//----------------------------------------------------------------------------------
+// Module Functions Definition - Vector3 math
+//----------------------------------------------------------------------------------
+
+
+// Vector with components value 0.0
+@(require_results, deprecated="Prefer Vector3(0)")
+Vector3Zero :: proc "c" () -> Vector3 {
+	return Vector3(0)
+}
+// Vector with components value 1.0
+@(require_results, deprecated="Prefer Vector3(1)")
+Vector3One :: proc "c" () -> Vector3 {
+	return Vector3(1)
+}
+// Add two vectors (v1 + v2)
+@(require_results, deprecated="Prefer v1 + v2")
+Vector3Add :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return v1 + v2
+}
+// Add vector and float value
+@(require_results, deprecated="Prefer v + value")
+Vector3AddValue :: proc "c" (v: Vector3, value: f32) -> Vector3 {
+	return v + value
+}
+// Subtract two vectors (v1 - v2)
+@(require_results, deprecated="Prefer a - b")
+Vector3Subtract :: proc "c" (a, b: Vector3) -> Vector3 {
+	return a - b
+}
+// Subtract vector by float value
+@(require_results, deprecated="Prefer v + value")
+Vector3SubtractValue :: proc "c" (v: Vector3, value: f32) -> Vector3 {
+	return v - value
+}
+// Calculate vector length
+@(require_results, deprecated="Prefer linalg.length(v)")
+Vector3Length :: proc "c" (v: Vector3) -> f32 {
+	return linalg.length(v)
+}
+// Calculate vector square length
+@(require_results, deprecated="Prefer linalg.length2(v)")
+Vector3LengthSqr :: proc "c" (v: Vector3) -> f32 {
+	return linalg.length2(v)
+}
+// Calculate two vectors dot product
+@(require_results, deprecated="Prefer linalg.dot(v1, v2)")
+Vector3DotProduct :: proc "c" (v1, v2: Vector3) -> f32 {
+	return linalg.dot(v1, v2)
+}
+// Calculate two vectors cross product
+@(require_results, deprecated="Prefer linalg.cross(v1, v2)")
+Vector3CrossProduct :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return linalg.cross(v1, v2)
+}
+// Calculate distance between two vectors
+@(require_results, deprecated="Prefer linalg.distance(v1, v2)")
+Vector3Distance :: proc "c" (v1, v2: Vector3) -> f32 {
+	return linalg.distance(v1, v2)
+}
+// Calculate square distance between two vectors
+@(require_results, deprecated="Prefer linalg.length2(v2-v1)")
+Vector3DistanceSqrt :: proc "c" (v1, v2: Vector3) -> f32 {
+	return linalg.length2(v2-v1)
+}
+// Calculate angle between two vectors
+// NOTE: Angle is calculated from origin point (0, 0, 0)
+@(require_results, deprecated="Prefer linalg.angle_between(v1, v2)")
+Vector3Angle :: proc "c" (v1, v2: Vector3) -> f32 {
+	return linalg.angle_between(v1, v2)
+}
+
+// Calculate the angle defined by the line between two vectors
+// NOTE: Parameters need to be normalized
+// NOTE: Only the XY components are used, mirroring the Vector2 version
+// Current implementation should be aligned with glm::angle
+@(require_results)
+Vector3LineAngle :: proc "c" (start, end: Vector3) -> f32 {
+	// TODO(10/9/2023): Currently angles move clockwise, determine if this is wanted behavior
+	return -math.atan2(end.y - start.y, end.x - start.x)
+}
+
+// Scale vector (multiply by value)
+@(require_results, deprecated="Prefer v * scale")
+Vector3Scale :: proc "c" (v: Vector3, scale: f32) -> Vector3 {
+	return v * scale
+}
+// Multiply vector by vector
+@(require_results, deprecated="Prefer v1 * v2")
+Vector3Multiply :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return v1 * v2
+}
+// Negate vector
+@(require_results, deprecated="Prefer -v")
+Vector3Negate :: proc "c" (v: Vector3) -> Vector3 {
+	return -v
+}
+// Divide vector by vector
+@(require_results, deprecated="Prefer v1 / v2")
+Vector3Divide :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return v1 / v2
+}
+// Normalize provided vector
+@(require_results, deprecated="Prefer linalg.normalize0(v)")
+Vector3Normalize :: proc "c" (v: Vector3) -> Vector3 {
+	return linalg.normalize0(v)
+}
+
+// Calculate the projection of the vector v1 onto v2
+@(require_results)
+Vector3Project :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return linalg.projection(v1, v2)
+}
+
+// Calculate the rejection of the vector v1 from v2
+@(require_results)
+Vector3Reject :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	mag := linalg.dot(v1, v2)/linalg.dot(v2, v2)
+	return v1 - v2*mag
+}
+
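+// Illustrative sketch (not part of raylib's API): projection and rejection
+// decompose a vector, so v1 == Vector3Project(v1, v2) + Vector3Reject(v1, v2)
+// up to floating-point error.
+@(private)
+_example_project_reject :: proc "c" (v1, v2: Vector3) -> bool {
+	return Vector3Equals(v1, Vector3Project(v1, v2) + Vector3Reject(v1, v2))
+}
+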
+// Orthonormalize provided vectors
+// Makes vectors normalized and orthogonal to each other
+// Gram-Schmidt function implementation
+Vector3OrthoNormalize :: proc "c" (v1, v2: ^Vector3) {
+	v1^ = linalg.normalize0(v1^)
+	v3 := linalg.normalize0(linalg.cross(v1^, v2^))
+	v2^ = linalg.cross(v3, v1^)
+}
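+
+// Illustrative sketch (not part of raylib's API): after Gram-Schmidt
+// orthonormalization the two vectors are unit length and perpendicular.
+@(private)
+_example_ortho_normalize :: proc "c" () -> f32 {
+	a := Vector3{1, 2, 3}
+	b := Vector3{4, 5, 6}
+	Vector3OrthoNormalize(&a, &b)
+	return linalg.dot(a, b) // approximately 0
+}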
+
+// Transform a vector by quaternion rotation
+@(require_results, deprecated="Prefer linalg.mul(q, v")
+Vector3RotateByQuaternion :: proc "c" (v: Vector3, q: Quaternion) -> Vector3 {
+	return linalg.mul(q, v)
+}
+
+// Rotates a vector around an axis
+@(require_results)
+Vector3RotateByAxisAngle :: proc "c" (v: Vector3, axis: Vector3, angle: f32) -> Vector3 {
+	axis, angle := axis, angle
+
+	axis = linalg.normalize0(axis)
+
+	angle *= 0.5
+	a := math.sin(angle)
+	b := axis.x*a
+	c := axis.y*a
+	d := axis.z*a
+	a = math.cos(angle)
+	w := Vector3{b, c, d}
+
+	wv := linalg.cross(w, v)
+	wwv := linalg.cross(w, wv)
+
+	a *= 2
+	wv *= a
+
+	wwv *= 2
+
+	return v + wv + wwv
+}
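+
+// Illustrative sketch (not part of raylib's API): the quaternion expansion
+// above matches building the quaternion explicitly; both rotate {1, 0, 0} a
+// quarter turn about +Z to approximately {0, 1, 0}.
+@(private)
+_example_axis_angle_rotation :: proc "c" () -> Vector3 {
+	q := QuaternionFromAxisAngle(Vector3{0, 0, 1}, math.PI/2)
+	return linalg.mul(q, Vector3{1, 0, 0}) // approximately {0, 1, 0}
+}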
+
+// Transforms a Vector3 by a given Matrix
+@(require_results)
+Vector3Transform :: proc "c" (v: Vector3, m: Matrix) -> Vector3 {
+	v4 := Vector4{v.x, v.y, v.z, 1} // w = 1 so the matrix translation column is applied, matching raylib's Vector3Transform
+	return (m * v4).xyz
+}
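+// Illustrative sketch (not part of raylib's API): with w = 1 the translation
+// column takes effect, so transforming the origin by a translation matrix
+// yields the translation itself.
+@(private)
+_example_transform_translation :: proc "c" () -> Vector3 {
+	return Vector3Transform(Vector3(0), MatrixTranslate(1, 2, 3)) // {1, 2, 3}
+}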
+// Calculate linear interpolation between two vectors
+@(require_results, deprecated="Prefer = linalg.lerp(v1, v2, amount)")
+Vector3Lerp :: proc "c" (v1, v2: Vector3, amount: f32) -> Vector3 {
+	return linalg.lerp(v1, v2, amount)
+}
+// Calculate reflected vector to normal
+@(require_results, deprecated="Prefer = linalg.reflect(v, normal)")
+Vector3Reflect :: proc "c" (v, normal: Vector3) -> Vector3 {
+	return linalg.reflect(v, normal)
+}
+// Compute the direction of a refracted ray
+// v: normalized direction of the incoming ray
+// n: normalized normal vector of the interface of two optical media
+// r: ratio of the refractive index of the medium from where the ray comes
+//    to the refractive index of the medium on the other side of the surface
+@(require_results, deprecated="Prefer = linalg.refract(v, n, r)")
+Vector3Refract :: proc "c" (v, n: Vector3, r: f32) -> Vector3 {
+	return linalg.refract(v, n, r)
+}
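+
+// Illustrative sketch (not part of raylib's API): a ray entering glass from air
+// bends toward the normal; the ratio is about 1.0/1.5 for air into glass.
+@(private)
+_example_refract_air_to_glass :: proc "c" () -> Vector3 {
+	incoming := linalg.normalize0(Vector3{1, -1, 0}) // 45 degrees downward
+	normal   := Vector3{0, 1, 0}                     // surface normal, pointing up
+	return linalg.refract(incoming, normal, 1.0/1.5)
+}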
+
+// Move Vector towards target
+@(require_results)
+Vector3MoveTowards :: proc "c" (v, target: Vector3, maxDistance: f32) -> Vector3 {
+	dv := target - v
+	value := linalg.dot(dv, dv)
+
+	if value == 0 || (maxDistance >= 0 && value <= maxDistance*maxDistance) {
+		return target
+	}
+
+	dist := math.sqrt(value)
+	return v + dv/dist*maxDistance
+}
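+
+// Illustrative sketch (not part of raylib's API): stepping a position toward a
+// target at a fixed speed without overshooting; speed and dt are assumed names.
+@(private)
+_example_move_towards :: proc "c" (pos, target: Vector3, speed, dt: f32) -> Vector3 {
+	return Vector3MoveTowards(pos, target, speed*dt)
+}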
+
+// Invert the given vector
+@(require_results, deprecated="Prefer 1.0/v")
+Vector3Invert :: proc "c" (v: Vector3) -> Vector3 {
+	return 1.0/v
+}
+
+// Clamp the components of the vector between
+// min and max values specified by the given vectors
+@(require_results)
+Vector3Clamp :: proc "c" (v: Vector3, min, max: Vector3) -> Vector3 {
+	return Vector3{
+		clamp(v.x, min.x, max.x),
+		clamp(v.y, min.y, max.y),
+		clamp(v.z, min.z, max.z),
+	}
+}
+
+// Clamp the magnitude of the vector between two min and max values
+@(require_results)
+Vector3ClampValue :: proc "c" (v: Vector3, min, max: f32) -> Vector3 {
+	result := v
+
+	length := linalg.dot(v, v)
+	if length > 0 {
+		length = math.sqrt(length)
+		scale := f32(1)
+		if length < min {
+			scale = min/length
+		} else if length > max {
+			scale = max/length
+		}
+		result = v*scale
+	}
+	return result
+}
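+
+// Illustrative sketch (not part of raylib's API): limiting a velocity to a
+// maximum speed while leaving slower vectors untouched.
+@(private)
+_example_limit_speed :: proc "c" (velocity: Vector3, max_speed: f32) -> Vector3 {
+	return Vector3ClampValue(velocity, 0, max_speed)
+}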
+
+@(require_results)
+Vector3Equals :: proc "c" (p, q: Vector3) -> bool {
+	return FloatEquals(p.x, q.x) &&
+	       FloatEquals(p.y, q.y) &&
+	       FloatEquals(p.z, q.z)
+}
+
+
+@(require_results, deprecated="Prefer linalg.min(v1, v2)")
+Vector3Min :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return linalg.min(v1, v2)
+}
+
+@(require_results, deprecated="Prefer linalg.max(v1, v2)")
+Vector3Max :: proc "c" (v1, v2: Vector3) -> Vector3 {
+	return linalg.max(v1, v2)
+}
+
+
+// Compute barycentric coordinates (u, v, w) for point p with respect to triangle (a, b, c)
+// NOTE: Assumes P is on the plane of the triangle
+@(require_results)
+Vector3Barycenter :: proc "c" (p: Vector3, a, b, c: Vector3) -> (result: Vector3) {
+	v0 := b - a
+	v1 := c - a
+	v2 := p - a
+	d00 := linalg.dot(v0, v0)
+	d01 := linalg.dot(v0, v1)
+	d11 := linalg.dot(v1, v1)
+	d20 := linalg.dot(v2, v0)
+	d21 := linalg.dot(v2, v1)
+
+	denom := d00*d11 - d01*d01
+
+	result.y = (d11*d20 - d01*d21)/denom
+	result.z = (d00*d21 - d01*d20)/denom
+	result.x = 1 - (result.z + result.y)
+
+	return result
+}
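+
+// Illustrative sketch (not part of raylib's API): using the barycentric weights
+// to interpolate a per-vertex attribute across the triangle.
+@(private)
+_example_barycentric_interpolate :: proc "c" (p, a, b, c: Vector3, attr_a, attr_b, attr_c: Vector3) -> Vector3 {
+	uvw := Vector3Barycenter(p, a, b, c)
+	return attr_a*uvw.x + attr_b*uvw.y + attr_c*uvw.z
+}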
+
+
+// Projects a Vector3 from screen space into object space
+@(require_results)
+Vector3Unproject :: proc "c" (source: Vector3, projection: Matrix, view: Matrix) -> Vector3 {
+	matViewProj := view * projection
+
+	matViewProjInv := linalg.inverse(matViewProj)
+
+	quat: Quaternion
+	quat.x = source.x
+	quat.y = source.y
+	quat.z = source.z
+	quat.w = 1
+
+	qtransformed := QuaternionTransform(quat, matViewProjInv)
+
+	return Vector3{qtransformed.x/qtransformed.w, qtransformed.y/qtransformed.w, qtransformed.z/qtransformed.w}
+}
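+
+// Illustrative sketch (not part of raylib's API): unprojecting a screen point at
+// the near and far planes to build a picking ray; `ndc` is assumed to already be
+// in normalized device coordinates.
+@(private)
+_example_picking_ray :: proc "c" (ndc: Vector2, projection, view: Matrix) -> (origin, direction: Vector3) {
+	near_point := Vector3Unproject(Vector3{ndc.x, ndc.y, 0}, projection, view)
+	far_point  := Vector3Unproject(Vector3{ndc.x, ndc.y, 1}, projection, view)
+	return near_point, linalg.normalize0(far_point - near_point)
+}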
+
+
+
+//----------------------------------------------------------------------------------
+// Module Functions Definition - Matrix math
+//----------------------------------------------------------------------------------
+
+// Compute matrix determinant
+@(require_results, deprecated="Prefer linalg.determinant(mat)")
+MatrixDeterminant :: proc "c" (mat: Matrix) -> f32 {
+	return linalg.determinant(mat)
+}
+
+// Get the trace of the matrix (sum of the values along the diagonal)
+@(require_results, deprecated="Prefer linalg.trace(mat)")
+MatrixTrace :: proc "c" (mat: Matrix) -> f32 {
+	return linalg.trace(mat)
+}
+
+// Transposes provided matrix
+@(require_results, deprecated="Prefer linalg.transpose(mat)")
+MatrixTranspose :: proc "c" (mat: Matrix) -> Matrix {
+	return linalg.transpose(mat)
+}
+
+// Invert provided matrix
+@(require_results, deprecated="Prefer linalg.inverse(mat)")
+MatrixInvert :: proc "c" (mat: Matrix) -> Matrix {
+	return linalg.inverse(mat)
+}
+
+// Get identity matrix
+@(require_results, deprecated="Prefer Matrix(1)")
+MatrixIdentity :: proc "c" () -> Matrix {
+	return Matrix(1)
+}
+
+// Add two matrices
+@(require_results, deprecated="Prefer left + right")
+MatrixAdd :: proc "c" (left, right: Matrix) -> Matrix {
+	return left + right
+}
+
+// Subtract two matrices (left - right)
+@(require_results, deprecated="Prefer left - right")
+MatrixSubtract :: proc "c" (left, right: Matrix) -> Matrix {
+	return left - right
+}
+
+// Multiply two matrices (left * right)
+// NOTE: When multiplying matrices... the order matters!
+@(require_results, deprecated="Prefer left * right")
+MatrixMultiply :: proc "c" (left, right: Matrix) -> Matrix {
+	return left * right
+}
+
+// Get translation matrix
+@(require_results)
+MatrixTranslate :: proc "c" (x, y, z: f32) -> Matrix {
+	return linalg.matrix4_translate(Vector3{x, y, z})
+}
+
+// Create rotation matrix from axis and angle
+// NOTE: Angle should be provided in radians
+@(require_results)
+MatrixRotate :: proc "c" (axis: Vector3, angle: f32) -> Matrix {
+	return linalg.matrix4_rotate(angle, axis)
+}
+
+// Get x-rotation matrix
+// NOTE: Angle must be provided in radians
+@(require_results)
+MatrixRotateX :: proc "c" (angle: f32) -> Matrix {
+	return linalg.matrix4_rotate(angle, Vector3{1, 0, 0})
+}
+
+// Get y-rotation matrix
+// NOTE: Angle must be provided in radians
+@(require_results)
+MatrixRotateY :: proc "c" (angle: f32) -> Matrix {
+	return linalg.matrix4_rotate(angle, Vector3{0, 1, 0})
+}
+
+// Get z-rotation matrix
+// NOTE: Angle must be provided in radians
+@(require_results)
+MatrixRotateZ :: proc "c" (angle: f32) -> Matrix {
+	return linalg.matrix4_rotate(angle, Vector3{0, 0, 1})
+}
+
+// Get xyz-rotation matrix
+// NOTE: Angle must be provided in radians
+@(require_results)
+MatrixRotateXYZ :: proc "c" (angle: Vector3) -> Matrix {
+	return linalg.matrix4_from_euler_angles_xyz(angle.x, angle.y, angle.z)
+}
+
+// Get zyx-rotation matrix
+// NOTE: Angle must be provided in radians
+@(require_results)
+MatrixRotateZYX :: proc "c" (angle: Vector3) -> Matrix {
+	return linalg.matrix4_from_euler_angles_zyx(angle.x, angle.y, angle.z)
+}
+
+
+// Get scaling matrix
+@(require_results)
+MatrixScale :: proc "c" (x, y, z: f32) -> Matrix {
+	return linalg.matrix4_scale(Vector3{x, y, z})
+}
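+
+// Illustrative sketch (not part of raylib's API): composing a
+// translate-rotate-scale transform; with column vectors the rightmost matrix is
+// applied first.
+@(private)
+_example_trs :: proc "c" (pos: Vector3, angle, s: f32) -> Matrix {
+	return MatrixTranslate(pos.x, pos.y, pos.z) * MatrixRotateY(angle) * MatrixScale(s, s, s)
+}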
+
+// Get orthographic projection matrix
+@(require_results)
+MatrixOrtho :: proc "c" (left, right, bottom, top, near, far: f32) -> Matrix {
+	return linalg.matrix_ortho3d(left, right, bottom, top, near, far)
+}
+
+// Get perspective projection matrix
+// NOTE: Fovy angle must be provided in radians
+@(require_results)
+MatrixPerspective :: proc "c" (fovY, aspect, nearPlane, farPlane: f32) -> Matrix {
+	return linalg.matrix4_perspective(fovY, aspect, nearPlane, farPlane)
+}
+// Get camera look-at matrix (view matrix)
+@(require_results)
+MatrixLookAt :: proc "c" (eye, target, up: Vector3) -> Matrix {
+	return linalg.matrix4_look_at(eye, target, up)
+}
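+
+// Illustrative sketch (not part of raylib's API): a view matrix maps the eye
+// position to the origin of camera space.
+@(private)
+_example_look_at_eye :: proc "c" (eye, target: Vector3) -> Vector3 {
+	view := MatrixLookAt(eye, target, Vector3{0, 1, 0})
+	return Vector3Transform(eye, view) // approximately {0, 0, 0}
+}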
+
+// Get float array of matrix data
+@(require_results)
+MatrixToFloatV :: proc "c" (mat: Matrix) -> [16]f32 {
+	return transmute([16]f32)mat
+}
+
+
+//----------------------------------------------------------------------------------
+// Module Functions Definition - Quaternion math
+//----------------------------------------------------------------------------------
+
+
+
+// Add two quaternions
+@(require_results, deprecated="Prefer q1 + q2")
+QuaternionAdd :: proc "c" (q1, q2: Quaternion) -> Quaternion {
+	return q1 + q2
+}
+// Add quaternion and float value
+@(require_results)
+QuaternionAddValue :: proc "c" (q: Quaternion, add: f32) -> Quaternion {
+	return q + Quaternion(add)
+}
+// Subtract two quaternions
+@(require_results, deprecated="Prefer q1 - q2")
+QuaternionSubtract :: proc "c" (q1, q2: Quaternion) -> Quaternion {
+	return q1 - q2
+}
+// Subtract quaternion and float value
+@(require_results)
+QuaternionSubtractValue :: proc "c" (q: Quaternion, sub: f32) -> Quaternion {
+	return q - Quaternion(sub)
+}
+// Get identity quaternion
+@(require_results, deprecated="Prefer Quaternion(1)")
+QuaternionIdentity :: proc "c" () -> Quaternion {
+	return 1
+}
+// Computes the length of a quaternion
+@(require_results, deprecated="Prefer abs(q)")
+QuaternionLength :: proc "c" (q: Quaternion) -> f32 {
+	return abs(q)
+}
+// Normalize provided quaternion
+@(require_results, deprecated="Prefer linalg.normalize0(q)")
+QuaternionNormalize :: proc "c" (q: Quaternion) -> Quaternion {
+	return linalg.normalize0(q)
+}
+// Invert provided quaternion
+@(require_results, deprecated="Prefer 1/q")
+QuaternionInvert :: proc "c" (q: Quaternion) -> Quaternion {
+	return 1/q
+}
+// Multiply two quaternions (q1 * q2)
+@(require_results, deprecated="Prefer q1 * q2")
+QuaternionMultiply :: proc "c" (q1, q2: Quaternion) -> Quaternion {
+	return q1 * q2
+}
+// Scale quaternion by float value
+@(require_results)
+QuaternionScale :: proc "c" (q: Quaternion, mul: f32) -> Quaternion {
+	return q * Quaternion(mul)
+}
+// Divide two quaternions
+@(require_results, deprecated="Prefer q1 / q2")
+QuaternionDivide :: proc "c" (q1, q2: Quaternion) -> Quaternion {
+	return q1 / q2
+}
+// Calculate linear interpolation between two quaternions
+@(require_results)
+QuaternionLerp :: proc "c" (q1, q2: Quaternion, amount: f32) -> (q3: Quaternion) {
+	q3.x = q1.x + (q2.x-q1.x)*amount
+	q3.y = q1.y + (q2.y-q1.y)*amount
+	q3.z = q1.z + (q2.z-q1.z)*amount
+	q3.w = q1.w + (q2.w-q1.w)*amount
+	return
+}
+// Calculate slerp-optimized interpolation between two quaternions
+@(require_results)
+QuaternionNlerp :: proc "c" (q1, q2: Quaternion, amount: f32) -> Quaternion {
+	return linalg.quaternion_nlerp(q1, q2, amount)
+}
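+// Illustrative sketch (not part of raylib's API): a plain component-wise lerp
+// generally leaves the unit sphere, which is why nlerp normalizes the result;
+// for rotations prefer QuaternionNlerp or QuaternionSlerp over QuaternionLerp.
+@(private)
+_example_lerp_vs_nlerp :: proc "c" (q1, q2: Quaternion, amount: f32) -> Quaternion {
+	return linalg.normalize0(QuaternionLerp(q1, q2, amount)) // matches QuaternionNlerp
+}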
+// Calculates spherical linear interpolation between two quaternions
+@(require_results)
+QuaternionSlerp :: proc "c" (q1, q2: Quaternion, amount: f32) -> Quaternion {
+	return linalg.quaternion_slerp(q1, q2, amount)
+}
+// Calculate quaternion based on the rotation from one vector to another
+@(require_results)
+QuaternionFromVector3ToVector3 :: proc "c" (from, to: Vector3) -> Quaternion {
+	return linalg.quaternion_between_two_vector3(from, to)
+}
+// Get a quaternion for a given rotation matrix
+@(require_results)
+QuaternionFromMatrix :: proc "c" (mat: Matrix) -> Quaternion {
+	return linalg.quaternion_from_matrix4(mat)
+}
+// Get a matrix for a given quaternion
+@(require_results)
+QuaternionToMatrix :: proc "c" (q: Quaternion) -> Matrix {
+	return linalg.matrix4_from_quaternion(q)
+}
+// Get rotation quaternion for an angle and axis
+// NOTE: Angle must be provided in radians
+@(require_results)
+QuaternionFromAxisAngle :: proc "c" (axis: Vector3, angle: f32) -> Quaternion {
+	return linalg.quaternion_angle_axis(angle, axis)
+}
+// Get the rotation angle and axis for a given quaternion
+@(require_results)
+QuaternionToAxisAngle :: proc "c" (q: Quaternion) -> (outAxis: Vector3, outAngle: f32) {
+	outAngle, outAxis = linalg.angle_axis_from_quaternion(q)
+	return
+}
+// Get the quaternion equivalent to Euler angles
+// NOTE: Rotation order is ZYX
+@(require_results)
+QuaternionFromEuler :: proc "c" (pitch, yaw, roll: f32) -> Quaternion {
+	return linalg.quaternion_from_pitch_yaw_roll(pitch, yaw, roll)
+}
+// Get the Euler angles equivalent to quaternion (roll, pitch, yaw)
+// NOTE: Angles are returned in a Vector3 struct in radians
+@(require_results)
+QuaternionToEuler :: proc "c" (q: Quaternion) -> Vector3 {
+	result: Vector3
+
+	// Roll (x-axis rotation)
+	x0 := 2.0*(q.w*q.x + q.y*q.z)
+	x1 := 1.0 - 2.0*(q.x*q.x + q.y*q.y)
+	result.x = math.atan2(x0, x1)
+
+	// Pitch (y-axis rotation)
+	y0 := 2.0*(q.w*q.y - q.z*q.x)
+	y0 =  1.0 if y0 >  1.0 else y0
+	y0 = -1.0 if y0 < -1.0 else y0
+	result.y = math.asin(y0)
+
+	// Yaw (z-axis rotation)
+	z0 := 2.0*(q.w*q.z + q.x*q.y)
+	z1 := 1.0 - 2.0*(q.y*q.y + q.z*q.z)
+	result.z = math.atan2(z0, z1)
+
+	return result
+}
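+// Illustrative sketch (not part of raylib's API): away from the +-90 degree
+// pitch singularity, QuaternionToEuler approximately recovers the angles given
+// to QuaternionFromEuler.
+@(private)
+_example_euler_round_trip :: proc "c" () -> Vector3 {
+	q := QuaternionFromEuler(0.3, 0.5, 0.1)
+	return QuaternionToEuler(q) // approximately {0.3, 0.5, 0.1}
+}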
+// Transform a quaternion given a transformation matrix
+@(require_results)
+QuaternionTransform :: proc "c" (q: Quaternion, mat: Matrix) -> Quaternion {
+	v := mat * transmute(Vector4)q
+	return transmute(Quaternion)v
+}
+// Check whether two given quaternions are almost equal
+@(require_results)
+QuaternionEquals :: proc "c" (p, q: Quaternion) -> bool {
+	return FloatEquals(p.x, q.x) &&
+	       FloatEquals(p.y, q.y) &&
+	       FloatEquals(p.z, q.z) &&
+	       FloatEquals(p.w, q.w)
+}

+ 2 - 2
vendor/wasm/WebGL/webgl.odin

@@ -64,7 +64,7 @@ foreign webgl {
 	ClearColor    :: proc(r, g, b, a: f32) ---
 	ClearDepth    :: proc(x: Enum) ---
 	ClearStencil  :: proc(x: Enum) ---
-	ClearMask     :: proc(r, g, b, a: bool) ---
+	ColorMask     :: proc(r, g, b, a: bool) ---
 	CompileShader :: proc(shader: Shader) ---
 	
 	CompressedTexImage2D    :: proc(target: Enum, level: i32, internalformat: Enum, width, height: i32, border: i32, imageSize: int, data: rawptr) ---
@@ -266,4 +266,4 @@ TexImage2DSlice :: proc "contextless" (target: Enum, level: i32, internalformat:
 }
 TexSubImage2DSlice :: proc "contextless" (target: Enum, level: i32, xoffset, yoffset, width, height: i32, format, type: Enum, slice: $S/[]$E) {
 	TexSubImage2D(target, level, xoffset, yoffset, width, height, format, type, len(slice)*size_of(E), raw_data(slice))
-}
+}

+ 7 - 6
vendor/wasm/WebGL/webgl2.odin

@@ -2,6 +2,7 @@ package webgl
 
 foreign import "webgl2"
 
+import "base:intrinsics"
 import glm "core:math/linalg/glsl"
 
 Query             :: distinct u32
@@ -135,42 +136,42 @@ UniformMatrix3x2fv :: proc "contextless" (location: i32, m: glm.mat3x2) {
 	foreign webgl2 {
 		_UniformMatrix3x2fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix3x2fv(location, &array[0])
 }
 UniformMatrix4x2fv :: proc "contextless" (location: i32, m: glm.mat4x2) {
 	foreign webgl2 {
 		_UniformMatrix4x2fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix4x2fv(location, &array[0])
 }
 UniformMatrix2x3fv :: proc "contextless" (location: i32, m: glm.mat2x3) {
 	foreign webgl2 {
 		_UniformMatrix2x3fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix2x3fv(location, &array[0])
 }
 UniformMatrix4x3fv :: proc "contextless" (location: i32, m: glm.mat4x3) {
 	foreign webgl2 {
 		_UniformMatrix4x3fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix4x3fv(location, &array[0])
 }
 UniformMatrix2x4fv :: proc "contextless" (location: i32, m: glm.mat2x4) {
 	foreign webgl2 {
 		_UniformMatrix2x4fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix2x4fv(location, &array[0])
 }
 UniformMatrix3x4fv :: proc "contextless" (location: i32, m: glm.mat3x4) {
 	foreign webgl2 {
 		_UniformMatrix3x4fv :: proc "contextless" (location: i32, addr: [^]f32) ---
 	}
-	array := matrix_flatten(m)
+	array := intrinsics.matrix_flatten(m)
 	_UniformMatrix3x4fv(location, &array[0])
 }
 }