author		Herbert Xu <herbert@gondor.apana.org.au>	2025-05-23 20:28:56 +0800
committer	Greg Kroah-Hartman <gregkh@linuxfoundation.org>	2025-08-15 16:38:58 +0200
commit		f6b54b13656d9b8b8411cf62bf03cc5ebba6f4f1 (patch)
tree		d6a3c73d7ed2361de720c89f1a49f9745c65bb0c
parent		6d0002ae94b568fcd23dd69bbb5253188d395cfa (diff)
crypto: s390/sha3 - Use cpu byte-order when exporting
[ Upstream commit 73c2437109c3eab274258a6430ae5dafac1ef43e ]

The sha3 partial hash on s390 is little-endian, just like the final
hash.  However, the generic implementation produces partial hashes in
native byte order, which is big-endian on s390.  Make s390 sha3 conform
to that by byte-swapping the state on export and import.

Reported-by: Ingo Franzki <ifranzki@linux.ibm.com>
Fixes: 6f90ba706551 ("crypto: s390/sha3 - Use API partial block handling")
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Sasha Levin <sashal@kernel.org>
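A minimal stand-alone sketch of the byte-order conversion described above, assuming glibc's endian.h helpers (le64toh()/htole64()) as stand-ins for the kernel's le64_to_cpu()/cpu_to_le64(); SHA3_LANES, the function names and the use of memcpy() in place of put_unaligned()/get_unaligned() are illustrative, not taken from the patch:

/*
 * Sketch only, not the kernel code: the hardware state keeps each
 * 64-bit Keccak lane little-endian, the generic export format keeps
 * lanes in CPU byte order, so export/import swap every lane.
 */
#include <stdint.h>
#include <string.h>
#include <endian.h>

#define SHA3_LANES 25	/* assumes the usual 200-byte Keccak state */

/* Export: lanes stored little-endian, output written in CPU byte order. */
static void sha3_export_lanes(const uint64_t le_state[SHA3_LANES], uint8_t *out)
{
	for (int i = 0; i < SHA3_LANES; i++) {
		uint64_t lane = le64toh(le_state[i]);	/* swaps only on big-endian */

		memcpy(out + i * sizeof(lane), &lane, sizeof(lane));
	}
}

/* Import: read CPU-order lanes and store them back little-endian. */
static void sha3_import_lanes(uint64_t le_state[SHA3_LANES], const uint8_t *in)
{
	for (int i = 0; i < SHA3_LANES; i++) {
		uint64_t lane;

		memcpy(&lane, in + i * sizeof(lane), sizeof(lane));
		le_state[i] = htole64(lane);		/* swaps only on big-endian */
	}
}

int main(void)
{
	uint64_t state[SHA3_LANES] = { htole64(0x0123456789abcdefULL) };
	uint64_t copy[SHA3_LANES];
	uint8_t blob[SHA3_LANES * 8];

	sha3_export_lanes(state, blob);	/* lanes now in CPU byte order */
	sha3_import_lanes(copy, blob);	/* back to little-endian storage */
	return memcmp(state, copy, sizeof(state)) != 0;
}

On a little-endian host both helpers are no-ops; on s390 they perform the swap that the patch adds.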
-rw-r--r--	arch/s390/crypto/sha.h			 3
-rw-r--r--	arch/s390/crypto/sha3_256_s390.c	24
-rw-r--r--	arch/s390/crypto/sha3_512_s390.c	25
3 files changed, 37 insertions(+), 15 deletions(-)
diff --git a/arch/s390/crypto/sha.h b/arch/s390/crypto/sha.h
index d757ccbce2b4..cadb4b13622a 100644
--- a/arch/s390/crypto/sha.h
+++ b/arch/s390/crypto/sha.h
@@ -27,6 +27,9 @@ struct s390_sha_ctx {
 			u64 state[SHA512_DIGEST_SIZE / sizeof(u64)];
 			u64 count_hi;
 		} sha512;
+		struct {
+			__le64 state[SHA3_STATE_SIZE / sizeof(u64)];
+		} sha3;
 	};
 	int func;		/* KIMD function to use */
 	bool first_message_part;
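The new sha3 member types the state lanes as __le64, making explicit that the CPACF state is stored little-endian regardless of host byte order. A hypothetical stand-alone check of what that means on a big-endian machine such as s390, with uint64_t plus htole64()/le64toh() standing in for __le64 and the kernel conversion helpers:

#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <endian.h>

int main(void)
{
	uint64_t lane = 0x0123456789abcdefULL;	/* an arbitrary Keccak lane value */
	uint64_t le_lane = htole64(lane);	/* lane as the hardware state stores it */
	unsigned char native[8], le[8];

	memcpy(native, &lane, sizeof(lane));	/* layout the generic export produces */
	memcpy(le, &le_lane, sizeof(le_lane));	/* layout of a little-endian state lane */

	/* The layouts differ on big-endian hosts; the conversion restores the value. */
	if (memcmp(native, le, sizeof(native)) != 0)
		assert(le64toh(le_lane) == lane);

	return 0;
}

On big-endian hosts the two byte layouts differ, which is why the old memcpy()-based export produced a partial hash the generic implementation could not consume.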
diff --git a/arch/s390/crypto/sha3_256_s390.c b/arch/s390/crypto/sha3_256_s390.c
index 4a7731ac6bcd..03bb4f4bab70 100644
--- a/arch/s390/crypto/sha3_256_s390.c
+++ b/arch/s390/crypto/sha3_256_s390.c
@@ -35,23 +35,33 @@ static int sha3_256_init(struct shash_desc *desc)
 static int sha3_256_export(struct shash_desc *desc, void *out)
 {
 	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
-	struct sha3_state *octx = out;
+	union {
+		u8 *u8;
+		u64 *u64;
+	} p = { .u8 = out };
+	int i;
 
 	if (sctx->first_message_part) {
-		memset(sctx->state, 0, sizeof(sctx->state));
-		sctx->first_message_part = 0;
+		memset(out, 0, SHA3_STATE_SIZE);
+		return 0;
 	}
-	memcpy(octx->st, sctx->state, sizeof(octx->st));
+	for (i = 0; i < SHA3_STATE_SIZE / 8; i++)
+		put_unaligned(le64_to_cpu(sctx->sha3.state[i]), p.u64++);
 	return 0;
 }
 
 static int sha3_256_import(struct shash_desc *desc, const void *in)
 {
 	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
-	const struct sha3_state *ictx = in;
-
+	union {
+		const u8 *u8;
+		const u64 *u64;
+	} p = { .u8 = in };
+	int i;
+
+	for (i = 0; i < SHA3_STATE_SIZE / 8; i++)
+		sctx->sha3.state[i] = cpu_to_le64(get_unaligned(p.u64++));
 	sctx->count = 0;
-	memcpy(sctx->state, ictx->st, sizeof(ictx->st));
 	sctx->first_message_part = 0;
 	sctx->func = CPACF_KIMD_SHA3_256;
diff --git a/arch/s390/crypto/sha3_512_s390.c b/arch/s390/crypto/sha3_512_s390.c
index 018f02fff444..a5c9690eecb1 100644
--- a/arch/s390/crypto/sha3_512_s390.c
+++ b/arch/s390/crypto/sha3_512_s390.c
@@ -34,24 +34,33 @@ static int sha3_512_init(struct shash_desc *desc)
 static int sha3_512_export(struct shash_desc *desc, void *out)
 {
 	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
-	struct sha3_state *octx = out;
-
+	union {
+		u8 *u8;
+		u64 *u64;
+	} p = { .u8 = out };
+	int i;
 	if (sctx->first_message_part) {
-		memset(sctx->state, 0, sizeof(sctx->state));
-		sctx->first_message_part = 0;
+		memset(out, 0, SHA3_STATE_SIZE);
+		return 0;
 	}
-	memcpy(octx->st, sctx->state, sizeof(octx->st));
+	for (i = 0; i < SHA3_STATE_SIZE / 8; i++)
+		put_unaligned(le64_to_cpu(sctx->sha3.state[i]), p.u64++);
 	return 0;
 }
 
 static int sha3_512_import(struct shash_desc *desc, const void *in)
 {
 	struct s390_sha_ctx *sctx = shash_desc_ctx(desc);
-	const struct sha3_state *ictx = in;
-
+	union {
+		const u8 *u8;
+		const u64 *u64;
+	} p = { .u8 = in };
+	int i;
+
+	for (i = 0; i < SHA3_STATE_SIZE / 8; i++)
+		sctx->sha3.state[i] = cpu_to_le64(get_unaligned(p.u64++));
 	sctx->count = 0;
-	memcpy(sctx->state, ictx->st, sizeof(ictx->st));
 	sctx->first_message_part = 0;
 	sctx->func = CPACF_KIMD_SHA3_512;