From: Herbert Xu <herbert@gondor.apana.org.au>
Date: Thu, 2 Feb 2023 16:33:47 +0800
Subject: [PATCH] crypto: arm64/sm4-gcm - Fix possible crash in GCM cryption
References: bsc#1012628
Patch-mainline: 6.2.2
Git-commit: 4e4a08868f15897ca236528771c3733fded42c62

commit 4e4a08868f15897ca236528771c3733fded42c62 upstream.

An often overlooked aspect of the skcipher walker API is that an
error is not just indicated by a non-zero return value, but by the
fact that walk->nbytes is zero.

Thus it is an error to call skcipher_walk_done after getting back
walk->nbytes == 0 from the previous interaction with the walker.

This is because when walk->nbytes is zero the walker is left in
an undefined state and any further calls to it may try to free
uninitialised stack memory.

The sm4 arm64 gcm code gets this wrong and ends up calling
skcipher_walk_done even when walk->nbytes is zero.

This patch rewrites the loop in a form that resembles other callers.
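For illustration only, and not part of the upstream change: the loop shape
used by other skcipher walker callers, which this patch adopts, looks
roughly like the sketch below. BLOCK_SIZE and the per-chunk cipher step are
placeholders, not code from this driver.

  err = skcipher_walk_aead_encrypt(&walk, req, false);

  /*
   * walk.nbytes == 0 (not only err != 0) means the walk failed or is
   * already finished, so skcipher_walk_done() must not be called again.
   */
  while (walk.nbytes) {
          unsigned int n = walk.nbytes;

          if (n < walk.total)
                  n = round_down(n, BLOCK_SIZE);  /* BLOCK_SIZE: placeholder */

          /* process n bytes from walk.src.virt.addr to walk.dst.virt.addr */

          err = skcipher_walk_done(&walk, walk.nbytes - n);
  }

  return err;
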
Reported-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Fixes: ae1b83c7d572 ("crypto: arm64/sm4 - add CE implementation for GCM mode")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Tested-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Cc: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Signed-off-by: Jiri Slaby <jslaby@suse.cz>
---
 arch/arm64/crypto/sm4-ce-gcm-glue.c | 51 ++++++++++++++---------------
 1 file changed, 25 insertions(+), 26 deletions(-)
diff --git a/arch/arm64/crypto/sm4-ce-gcm-glue.c b/arch/arm64/crypto/sm4-ce-gcm-glue.c
index c450a202..73bfb697 100644
--- a/arch/arm64/crypto/sm4-ce-gcm-glue.c
+++ b/arch/arm64/crypto/sm4-ce-gcm-glue.c
@@ -135,22 +135,23 @@ static void gcm_calculate_auth_mac(struct aead_request *req, u8 ghash[])
 }
 
 static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
-		     struct sm4_gcm_ctx *ctx, u8 ghash[],
+		     u8 ghash[], int err,
 		     void (*sm4_ce_pmull_gcm_crypt)(const u32 *rkey_enc,
 				u8 *dst, const u8 *src, u8 *iv,
 				unsigned int nbytes, u8 *ghash,
 				const u8 *ghash_table, const u8 *lengths))
 {
+	struct crypto_aead *aead = crypto_aead_reqtfm(req);
+	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) iv[SM4_BLOCK_SIZE];
 	be128 __aligned(8) lengths;
-	int err;
 
 	memset(ghash, 0, SM4_BLOCK_SIZE);
 
 	lengths.a = cpu_to_be64(req->assoclen * 8);
 	lengths.b = cpu_to_be64(walk->total * 8);
 
-	memcpy(iv, walk->iv, GCM_IV_SIZE);
+	memcpy(iv, req->iv, GCM_IV_SIZE);
 	put_unaligned_be32(2, iv + GCM_IV_SIZE);
 
 	kernel_neon_begin();
@@ -158,49 +159,51 @@ static int gcm_crypt(struct aead_request *req, struct skcipher_walk *walk,
 	if (req->assoclen)
 		gcm_calculate_auth_mac(req, ghash);
 
-	do {
+	while (walk->nbytes) {
 		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;
 		const u8 *src = walk->src.virt.addr;
 		u8 *dst = walk->dst.virt.addr;
 
 		if (walk->nbytes == walk->total) {
-			tail = 0;
-
 			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
 					       walk->nbytes, ghash,
 					       ctx->ghash_table,
 					       (const u8 *)&lengths);
-		} else if (walk->nbytes - tail) {
-			sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
-					       walk->nbytes - tail, ghash,
-					       ctx->ghash_table, NULL);
+
+			kernel_neon_end();
+
+			return skcipher_walk_done(walk, 0);
 		}
 
+		sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, dst, src, iv,
+				       walk->nbytes - tail, ghash,
+				       ctx->ghash_table, NULL);
+
 		kernel_neon_end();
 
 		err = skcipher_walk_done(walk, tail);
-		if (err)
-			return err;
-		if (walk->nbytes)
-			kernel_neon_begin();
-	} while (walk->nbytes > 0);
 
-	return 0;
+		kernel_neon_begin();
+	}
+
+	sm4_ce_pmull_gcm_crypt(ctx->key.rkey_enc, NULL, NULL, iv,
+			       walk->nbytes, ghash, ctx->ghash_table,
+			       (const u8 *)&lengths);
+
+	kernel_neon_end();
+
+	return err;
 }
 
 static int gcm_encrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
 	struct skcipher_walk walk;
 	int err;
 
 	err = skcipher_walk_aead_encrypt(&walk, req, false);
-	if (err)
-		return err;
-
-	err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_enc);
+	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_enc);
 	if (err)
 		return err;
 
@@ -215,17 +218,13 @@ static int gcm_decrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	unsigned int authsize = crypto_aead_authsize(aead);
-	struct sm4_gcm_ctx *ctx = crypto_aead_ctx(aead);
 	u8 __aligned(8) ghash[SM4_BLOCK_SIZE];
 	u8 authtag[SM4_BLOCK_SIZE];
 	struct skcipher_walk walk;
 	int err;
 
 	err = skcipher_walk_aead_decrypt(&walk, req, false);
-	if (err)
-		return err;
-
-	err = gcm_crypt(req, &walk, ctx, ghash, sm4_ce_pmull_gcm_dec);
+	err = gcm_crypt(req, &walk, ghash, err, sm4_ce_pmull_gcm_dec);
 	if (err)
 		return err;
 
-- 
2.35.3