@@ -7,8 +7,10 @@ import "core:crypto/_aes/ct64"
 import "core:encoding/endian"
 import "core:mem"

-// GCM_NONCE_SIZE is the size of the GCM nonce in bytes.
+// GCM_NONCE_SIZE is the default size of the GCM nonce in bytes.
 GCM_NONCE_SIZE :: 12
+// GCM_NONCE_SIZE_MAX is the maximum size of the GCM nonce in bytes.
+GCM_NONCE_SIZE_MAX :: 0x1FFFFFFFFFFFFFFF // floor((2^64 - 1) / 8)
 // GCM_TAG_SIZE is the size of a GCM tag in bytes.
 GCM_TAG_SIZE :: _aes.GHASH_TAG_SIZE

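For reference, the bound itself: NIST SP 800-38D permits IVs of 1 to 2^64 - 1 bits, and since this API measures nonces in whole bytes, the largest supportable length is floor((2^64 - 1) / 8) bytes. A standalone Odin sketch of the arithmetic (names are illustrative, not part of the package):

package main

import "core:fmt"

main :: proc() {
	// Largest IV length in bits permitted by NIST SP 800-38D: 2^64 - 1.
	max_bits := u64(0xFFFF_FFFF_FFFF_FFFF)
	// The API deals in whole bytes, so the usable maximum is
	// floor((2^64 - 1) / 8) bytes.
	max_bytes := max_bits / 8
	fmt.printf("%x\n", max_bytes) // 1fffffffffffffff
}
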
@@ -51,17 +53,19 @@ seal_gcm :: proc(ctx: ^Context_GCM, dst, tag, nonce, aad, plaintext: []byte) {

 	h: [_aes.GHASH_KEY_SIZE]byte
 	j0: [_aes.GHASH_BLOCK_SIZE]byte
+	j0_enc: [_aes.GHASH_BLOCK_SIZE]byte
 	s: [_aes.GHASH_TAG_SIZE]byte
-	init_ghash_ct64(ctx, &h, &j0, nonce)
+	init_ghash_ct64(ctx, &h, &j0, &j0_enc, nonce)

 	// Note: Our GHASH implementation handles appending padding.
 	ct64.ghash(s[:], h[:], aad)
-	gctr_ct64(ctx, dst, &s, plaintext, &h, nonce, true)
-	final_ghash_ct64(&s, &h, &j0, len(aad), len(plaintext))
+	gctr_ct64(ctx, dst, &s, plaintext, &h, &j0, true)
+	final_ghash_ct64(&s, &h, &j0_enc, len(aad), len(plaintext))
 	copy(tag, s[:])

 	mem.zero_explicit(&h, len(h))
 	mem.zero_explicit(&j0, len(j0))
+	mem.zero_explicit(&j0_enc, len(j0_enc))
 }

 // open_gcm authenticates the aad and ciphertext, and decrypts the ciphertext,
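For orientation, the seal path above computes the SP 800-38D tag equation. With v and u the zero-pad lengths that bring the aad A and ciphertext C up to full 128-bit blocks (the padding the note above says GHASH appends internally):

	S = GHASH_H(A || 0^v || C || 0^u || [len(A)]_64 || [len(C)]_64)
	T = MSB_t(CIPH_K(J0) ^ S)

final_ghash_ct64 folds the two length fields into S and XORs in the pre-encrypted counter block j0_enc; with t fixed at 128 bits here, copy(tag, s[:]) emits T whole.
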
@@ -87,12 +91,13 @@ open_gcm :: proc(ctx: ^Context_GCM, dst, nonce, aad, ciphertext, tag: []byte) ->

 	h: [_aes.GHASH_KEY_SIZE]byte
 	j0: [_aes.GHASH_BLOCK_SIZE]byte
+	j0_enc: [_aes.GHASH_BLOCK_SIZE]byte
 	s: [_aes.GHASH_TAG_SIZE]byte
-	init_ghash_ct64(ctx, &h, &j0, nonce)
+	init_ghash_ct64(ctx, &h, &j0, &j0_enc, nonce)

 	ct64.ghash(s[:], h[:], aad)
-	gctr_ct64(ctx, dst, &s, ciphertext, &h, nonce, false)
-	final_ghash_ct64(&s, &h, &j0, len(aad), len(ciphertext))
+	gctr_ct64(ctx, dst, &s, ciphertext, &h, &j0, false)
+	final_ghash_ct64(&s, &h, &j0_enc, len(aad), len(ciphertext))

 	ok := crypto.compare_constant_time(s[:], tag) == 1
 	if !ok {
@@ -101,6 +106,7 @@ open_gcm :: proc(ctx: ^Context_GCM, dst, nonce, aad, ciphertext, tag: []byte) ->

 	mem.zero_explicit(&h, len(h))
 	mem.zero_explicit(&j0, len(j0))
+	mem.zero_explicit(&j0_enc, len(j0_enc))
 	mem.zero_explicit(&s, len(s))

 	return ok
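crypto.compare_constant_time above deliberately avoids an early-exit byte compare, so a forged tag cannot be probed byte by byte through timing. A minimal standalone sketch of the technique; ct_eq is a hypothetical stand-in, not the core:crypto API:

package main

import "core:fmt"

// ct_eq accumulates differences across every byte instead of returning
// at the first mismatch, so its runtime is independent of where (or
// whether) the inputs differ.
ct_eq :: proc(a, b: []byte) -> bool {
	if len(a) != len(b) {
		return false
	}
	v: byte
	for _, i in a {
		v |= a[i] ~ b[i] // `~` is binary XOR in Odin
	}
	return v == 0
}

main :: proc() {
	fmt.println(ct_eq([]byte{1, 2, 3}, []byte{1, 2, 3})) // true
	fmt.println(ct_eq([]byte{1, 2, 3}, []byte{1, 2, 4})) // false
}
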
@@ -119,13 +125,8 @@ gcm_validate_common_slice_sizes :: proc(tag, nonce, aad, text: []byte) {
 		panic("crypto/aes: invalid GCM tag size")
 	}

-	// The specification supports nonces in the range [1, 2^64) bits
-	// however per NIST SP 800-38D 5.2.1.1:
-	//
-	// > For IVs, it is recommended that implementations restrict support
-	// > to the length of 96 bits, to promote interoperability, efficiency,
-	// > and simplicity of design.
-	if len(nonce) != GCM_NONCE_SIZE {
+	// The specification supports nonces in the range [1, 2^64) bits.
+	if l := len(nonce); l == 0 || u64(l) > GCM_NONCE_SIZE_MAX {
 		panic("crypto/aes: invalid GCM nonce size")
 	}

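The relaxed check accepts any whole-byte length in [1, GCM_NONCE_SIZE_MAX]. A standalone sketch of the same predicate, with the constant mirrored locally for illustration:

package main

import "core:fmt"

GCM_NONCE_SIZE_MAX :: 0x1FFFFFFFFFFFFFFF // mirrored from the constant above

nonce_size_ok :: proc(l: int) -> bool {
	return l != 0 && u64(l) <= GCM_NONCE_SIZE_MAX
}

main :: proc() {
	fmt.println(nonce_size_ok(0))  // false: the spec floor is one bit
	fmt.println(nonce_size_ok(12)) // true: the 96-bit default
	fmt.println(nonce_size_ok(16)) // true: newly permitted by this change
}
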
@@ -142,6 +143,7 @@ init_ghash_ct64 :: proc(
 	ctx: ^Context_GCM,
 	h: ^[_aes.GHASH_KEY_SIZE]byte,
 	j0: ^[_aes.GHASH_BLOCK_SIZE]byte,
+	j0_enc: ^[_aes.GHASH_BLOCK_SIZE]byte,
 	nonce: []byte,
 ) {
 	impl := &ctx._impl.(ct64.Context)
@@ -149,12 +151,25 @@ init_ghash_ct64 :: proc(
 	// 1. Let H = CIPH(k, 0^128)
 	ct64.encrypt_block(impl, h[:], h[:])

+	// 2. Define a block, J0, as follows:
+	if l := len(nonce); l == GCM_NONCE_SIZE {
+		// If len(IV) = 96, then let J0 = IV || 0^31 || 1.
+		copy(j0[:], nonce)
+		j0[_aes.GHASH_BLOCK_SIZE - 1] = 1
+	} else {
+		// If len(IV) != 96, then let s = 128 * ceil(len(IV)/128) - len(IV),
+		// and let J0 = GHASH_H(IV || 0^(s+64) || [len(IV)]_64).
+		ct64.ghash(j0[:], h[:], nonce)
+
+		tmp: [_aes.GHASH_BLOCK_SIZE]byte
+		endian.unchecked_put_u64be(tmp[8:], u64(l) * 8)
+		ct64.ghash(j0[:], h[:], tmp[:])
+	}
+
 	// ECB encrypt j0, so that we can just XOR with the tag. In theory
 	// this could be processed along with the final GCTR block, to
 	// potentially save a call to AES-ECB, but... just use AES-NI.
-	copy(j0[:], nonce)
-	j0[_aes.GHASH_BLOCK_SIZE - 1] = 1
-	ct64.encrypt_block(impl, j0[:], j0[:])
+	ct64.encrypt_block(impl, j0_enc[:], j0[:])
 }

 @(private = "file")
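A standalone sketch of the fast path above: for a 96-bit IV, J0 is simply the IV with a big-endian 1 in the final 32 bits, and no GHASH invocation is needed:

package main

import "core:fmt"

main :: proc() {
	// For len(IV) = 96, SP 800-38D defines J0 = IV || 0^31 || 1.
	iv := [12]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}
	j0: [16]byte
	copy(j0[:], iv[:])
	j0[15] = 1 // trailing 32 bits are 0^31 || 1
	fmt.println(j0) // [0, 1, 2, ..., 11, 0, 0, 0, 1]
}

Only the non-96-bit case pays for the two GHASH passes over the IV and its length block.
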
@@ -182,7 +197,7 @@ gctr_ct64 :: proc(
 	s: ^[_aes.GHASH_BLOCK_SIZE]byte,
 	src: []byte,
 	h: ^[_aes.GHASH_KEY_SIZE]byte,
-	nonce: []byte,
+	nonce: ^[_aes.GHASH_BLOCK_SIZE]byte,
 	is_seal: bool,
 ) #no_bounds_check {
 	ct64_inc_ctr32 := #force_inline proc "contextless" (dst: []byte, ctr: u32) -> u32 {
@@ -190,20 +205,17 @@ gctr_ct64 :: proc(
 		return ctr + 1
 	}

-	// 2. Define a block J_0 as follows:
-	// if len(IV) = 96, then let J0 = IV || 0^31 || 1
-	//
-	// Note: We only support 96 bit IVs.
+	// Set up the counter blocks, starting from inc32(J0).
 	tmp, tmp2: [ct64.STRIDE][BLOCK_SIZE]byte = ---, ---
 	ctrs, blks: [ct64.STRIDE][]byte = ---, ---
-	ctr: u32 = 2
+	ctr := endian.unchecked_get_u32be(nonce[GCM_NONCE_SIZE:]) + 1
 	for i in 0 ..< ct64.STRIDE {
 		// Setup scratch space for the keystream.
 		blks[i] = tmp2[i][:]

 		// Pre-copy the IV to all the counter blocks.
 		ctrs[i] = tmp[i][:]
-		copy(ctrs[i], nonce)
+		copy(ctrs[i], nonce[:GCM_NONCE_SIZE])
 	}

 	impl := &ctx._impl.(ct64.Context)
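
The counter seeding above implements inc32(J0): read the final 32 bits of J0 as a big-endian integer and add one, wrapping modulo 2^32 without disturbing the first 96 bits. For a 96-bit IV, J0 ends in 1, so the keystream starts at counter 2, matching the old hard-coded ctr: u32 = 2. A standalone sketch, assuming core:encoding/endian provides unchecked_put_u32be as the counterpart of the unchecked_get_u32be used above:

package main

import "core:encoding/endian"
import "core:fmt"

// inc32 increments the last 32 bits of a 128-bit block (big-endian),
// leaving the first 96 bits untouched and wrapping modulo 2^32.
inc32 :: proc(block: ^[16]byte) {
	ctr := endian.unchecked_get_u32be(block[12:]) + 1
	endian.unchecked_put_u32be(block[12:], ctr)
}

main :: proc() {
	j0: [16]byte
	j0[15] = 0xFF
	inc32(&j0)
	fmt.println(j0[12:]) // [0, 0, 1, 0]
}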