[PATCH 3/4] crypto: lib/chacha - add strongly-typed state zeroization

Eric Biggers ebiggers at kernel.org
Tue May 6 04:18:23 AEST 2025


From: Eric Biggers <ebiggers at google.com>

Now that the ChaCha state matrix is strongly-typed, add a helper
function chacha_zeroize_state() which zeroizes it.  Then convert all
applicable callers to use it instead of calling memzero_explicit()
directly.  No functional changes.

Signed-off-by: Eric Biggers <ebiggers at google.com>
---
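[Not part of the patch: a minimal usage sketch of the new helper, assuming
a caller that keeps a struct chacha_state on the stack; key, iv, dst, src
and len are placeholder variables in whatever form chacha_init() and
chacha20_crypt() expect.  The only change for such a caller is replacing
the open-coded memzero_explicit() with the typed helper:

	struct chacha_state state;

	chacha_init(&state, key, iv);
	chacha20_crypt(&state, dst, src, len);

	/* was: memzero_explicit(&state, sizeof(state)); */
	chacha_zeroize_state(&state);
]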
 drivers/char/random.c         | 4 ++--
 fs/bcachefs/checksum.c        | 4 ++--
 include/crypto/chacha.h       | 6 ++++++
 lib/crypto/chacha20poly1305.c | 6 +++---
 4 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/drivers/char/random.c b/drivers/char/random.c
index 9f876ed2655b..5f22a08101f6 100644
--- a/drivers/char/random.c
+++ b/drivers/char/random.c
@@ -420,11 +420,11 @@ static void _get_random_bytes(void *buf, size_t len)
 			++chacha_state.x[13];
 		len -= CHACHA_BLOCK_SIZE;
 		buf += CHACHA_BLOCK_SIZE;
 	}
 
-	memzero_explicit(&chacha_state, sizeof(chacha_state));
+	chacha_zeroize_state(&chacha_state);
 }
 
 /*
  * This returns random bytes in arbitrary quantities. The quality of the
  * random bytes is good as /dev/urandom. In order to ensure that the
@@ -483,11 +483,11 @@ static ssize_t get_random_bytes_user(struct iov_iter *iter)
 		}
 	}
 
 	memzero_explicit(block, sizeof(block));
 out_zero_chacha:
-	memzero_explicit(&chacha_state, sizeof(chacha_state));
+	chacha_zeroize_state(&chacha_state);
 	return ret ? ret : -EFAULT;
 }
 
 /*
  * Batched entropy returns random integers. The quality of the random
diff --git a/fs/bcachefs/checksum.c b/fs/bcachefs/checksum.c
index 312fda4bb1b5..a4df8eba75f3 100644
--- a/fs/bcachefs/checksum.c
+++ b/fs/bcachefs/checksum.c
@@ -111,11 +111,11 @@ static void bch2_chacha20(const struct bch_key *key, struct nonce nonce,
 {
 	struct chacha_state state;
 
 	bch2_chacha20_init(&state, key, nonce);
 	chacha20_crypt(&state, data, data, len);
-	memzero_explicit(&state, sizeof(state));
+	chacha_zeroize_state(&state);
 }
 
 static void bch2_poly1305_init(struct poly1305_desc_ctx *desc,
 			       struct bch_fs *c, struct nonce nonce)
 {
@@ -281,11 +281,11 @@ int __bch2_encrypt_bio(struct bch_fs *c, unsigned type,
 
 		p = bvec_kmap_local(&bv);
 		chacha20_crypt(&chacha_state, p, p, bv.bv_len);
 		kunmap_local(p);
 	}
-	memzero_explicit(&chacha_state, sizeof(chacha_state));
+	chacha_zeroize_state(&chacha_state);
 	return ret;
 }
 
 struct bch_csum bch2_checksum_merge(unsigned type, struct bch_csum a,
 				    struct bch_csum b, size_t b_len)
diff --git a/include/crypto/chacha.h b/include/crypto/chacha.h
index 64fb270f2bfc..7c2e6c68919b 100644
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -14,10 +14,11 @@
 
 #ifndef _CRYPTO_CHACHA_H
 #define _CRYPTO_CHACHA_H
 
 #include <linux/unaligned.h>
+#include <linux/string.h>
 #include <linux/types.h>
 
 /* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
 #define CHACHA_IV_SIZE		16
 
@@ -106,10 +107,15 @@ static inline void chacha20_crypt(struct chacha_state *state,
 				  u8 *dst, const u8 *src, unsigned int bytes)
 {
 	chacha_crypt(state, dst, src, bytes, 20);
 }
 
+static inline void chacha_zeroize_state(struct chacha_state *state)
+{
+	memzero_explicit(state, sizeof(*state));
+}
+
 #if IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA)
 bool chacha_is_arch_optimized(void);
 #else
 static inline bool chacha_is_arch_optimized(void)
 {
diff --git a/lib/crypto/chacha20poly1305.c b/lib/crypto/chacha20poly1305.c
index ed81f0658956..2e7bbc1a67ea 100644
--- a/lib/crypto/chacha20poly1305.c
+++ b/lib/crypto/chacha20poly1305.c
@@ -82,11 +82,11 @@ __chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
 	b.lens[1] = cpu_to_le64(src_len);
 	poly1305_update(&poly1305_state, (u8 *)b.lens, sizeof(b.lens));
 
 	poly1305_final(&poly1305_state, dst + src_len);
 
-	memzero_explicit(chacha_state, sizeof(*chacha_state));
+	chacha_zeroize_state(chacha_state);
 	memzero_explicit(&b, sizeof(b));
 }
 
 void chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
 			      const u8 *ad, const size_t ad_len,
@@ -186,11 +186,11 @@ bool chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
 
 	chacha_init(&chacha_state, k, (u8 *)iv);
 	ret = __chacha20poly1305_decrypt(dst, src, src_len, ad, ad_len,
 					 &chacha_state);
 
-	memzero_explicit(&chacha_state, sizeof(chacha_state));
+	chacha_zeroize_state(&chacha_state);
 	memzero_explicit(iv, sizeof(iv));
 	memzero_explicit(k, sizeof(k));
 	return ret;
 }
 EXPORT_SYMBOL(chacha20poly1305_decrypt);
@@ -326,11 +326,11 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
 			       sizeof(b.mac[1]), src_len, !encrypt);
 		ret = encrypt ||
 		      !crypto_memneq(b.mac[0], b.mac[1], POLY1305_DIGEST_SIZE);
 	}
 
-	memzero_explicit(&chacha_state, sizeof(chacha_state));
+	chacha_zeroize_state(&chacha_state);
 	memzero_explicit(&b, sizeof(b));
 
 	return ret;
 }
 
-- 
2.49.0


