[PATCH] crypto: sha*-mb Fix total_len for correct hash when larger than 512MB
From: Greg Tucker @ 2016-11-14 23:11 UTC
To: herbert, linux-crypto; +Cc: megha.dey, tim.c.chen, xiaodong.liu, Greg Tucker

The current multi-buffer hash implementations restrict the total length of
a hash job to 512 MB; hashing a larger buffer produces an incorrect hash.
Extend the limit to 2^62 - 1 by widening the total length counter from 32
to 64 bits.
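
The 512 MB figure is consistent with the byte count being converted to a
bit count in 32-bit arithmetic, since 2^32 bits is exactly 2^29 bytes
(512 MB). A minimal stand-alone illustration of that wrap (a sketch of the
arithmetic only, not code taken from these drivers):

	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		uint32_t len32 = UINT32_C(1) << 29;	/* 512 MB, in bytes */
		uint64_t len64 = len32;

		/* The 32-bit bit count wraps to zero at 512 MB... */
		printf("32-bit bit count: %llu\n",
		       (unsigned long long)(uint64_t)(len32 << 3));
		/* ...while the widened 64-bit counter keeps the value. */
		printf("64-bit bit count: %llu\n",
		       (unsigned long long)(len64 << 3));
		return 0;
	}
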
Signed-off-by: Greg Tucker <greg.b.tucker@intel.com>
---
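
Not part of the commit message: a rough user-space check, intended only as
an illustration. It pushes more than 512 MB through the kernel "sha256"
hash via the AF_ALG interface so the digest can be compared against a
user-space reference over the same data. Whether the multi-buffer driver
actually backs "sha256" depends on driver priority; /proc/crypto shows
which implementation is selected. Error handling is omitted for brevity.

	#include <stdio.h>
	#include <unistd.h>
	#include <sys/socket.h>
	#include <linux/if_alg.h>

	int main(void)
	{
		struct sockaddr_alg sa = {
			.salg_family = AF_ALG,
			.salg_type   = "hash",
			.salg_name   = "sha256",
		};
		static unsigned char buf[1 << 20];	/* 1 MB of zeroes */
		unsigned char digest[32];
		int tfm, op, i;

		tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
		bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
		op = accept(tfm, NULL, 0);

		/* 640 MB total; MSG_MORE keeps the hash open between sends. */
		for (i = 0; i < 640; i++)
			send(op, buf, sizeof(buf), i < 639 ? MSG_MORE : 0);

		read(op, digest, sizeof(digest));
		for (i = 0; i < (int)sizeof(digest); i++)
			printf("%02x", digest[i]);
		printf("\n");
		return 0;
	}

The printed digest should match "head -c 671088640 /dev/zero | sha256sum".
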
 arch/x86/crypto/sha1-mb/sha1_mb.c         | 2 +-
 arch/x86/crypto/sha1-mb/sha1_mb_ctx.h     | 2 +-
 arch/x86/crypto/sha256-mb/sha256_mb.c     | 2 +-
 arch/x86/crypto/sha256-mb/sha256_mb_ctx.h | 2 +-
 arch/x86/crypto/sha512-mb/sha512_mb.c     | 2 +-
 arch/x86/crypto/sha512-mb/sha512_mb_ctx.h | 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/x86/crypto/sha1-mb/sha1_mb.c b/arch/x86/crypto/sha1-mb/sha1_mb.c
index 9e5b671..acf9fdf 100644
--- a/arch/x86/crypto/sha1-mb/sha1_mb.c
+++ b/arch/x86/crypto/sha1-mb/sha1_mb.c
@@ -114,7 +114,7 @@ static inline void sha1_init_digest(uint32_t *digest)
}
static inline uint32_t sha1_pad(uint8_t padblock[SHA1_BLOCK_SIZE * 2],
- uint32_t total_len)
+ uint64_t total_len)
{
uint32_t i = total_len & (SHA1_BLOCK_SIZE - 1);
diff --git a/arch/x86/crypto/sha1-mb/sha1_mb_ctx.h b/arch/x86/crypto/sha1-mb/sha1_mb_ctx.h
index 98a35bc..13590cc 100644
--- a/arch/x86/crypto/sha1-mb/sha1_mb_ctx.h
+++ b/arch/x86/crypto/sha1-mb/sha1_mb_ctx.h
@@ -125,7 +125,7 @@ struct sha1_hash_ctx {
/* error flag */
int error;
- uint32_t total_length;
+ uint64_t total_length;
const void *incoming_buffer;
uint32_t incoming_buffer_length;
uint8_t partial_block_buffer[SHA1_BLOCK_SIZE * 2];
diff --git a/arch/x86/crypto/sha256-mb/sha256_mb.c b/arch/x86/crypto/sha256-mb/sha256_mb.c
index 6f97fb3..7926a22 100644
--- a/arch/x86/crypto/sha256-mb/sha256_mb.c
+++ b/arch/x86/crypto/sha256-mb/sha256_mb.c
@@ -115,7 +115,7 @@ inline void sha256_init_digest(uint32_t *digest)
}
inline uint32_t sha256_pad(uint8_t padblock[SHA256_BLOCK_SIZE * 2],
- uint32_t total_len)
+ uint64_t total_len)
{
uint32_t i = total_len & (SHA256_BLOCK_SIZE - 1);
diff --git a/arch/x86/crypto/sha256-mb/sha256_mb_ctx.h b/arch/x86/crypto/sha256-mb/sha256_mb_ctx.h
index edd252b..aabb303 100644
--- a/arch/x86/crypto/sha256-mb/sha256_mb_ctx.h
+++ b/arch/x86/crypto/sha256-mb/sha256_mb_ctx.h
@@ -125,7 +125,7 @@ struct sha256_hash_ctx {
/* error flag */
int error;
- uint32_t total_length;
+ uint64_t total_length;
const void *incoming_buffer;
uint32_t incoming_buffer_length;
uint8_t partial_block_buffer[SHA256_BLOCK_SIZE * 2];
diff --git a/arch/x86/crypto/sha512-mb/sha512_mb.c b/arch/x86/crypto/sha512-mb/sha512_mb.c
index d210174..9c1bb6d 100644
--- a/arch/x86/crypto/sha512-mb/sha512_mb.c
+++ b/arch/x86/crypto/sha512-mb/sha512_mb.c
@@ -117,7 +117,7 @@ inline void sha512_init_digest(uint64_t *digest)
}
inline uint32_t sha512_pad(uint8_t padblock[SHA512_BLOCK_SIZE * 2],
- uint32_t total_len)
+ uint64_t total_len)
{
uint32_t i = total_len & (SHA512_BLOCK_SIZE - 1);
diff --git a/arch/x86/crypto/sha512-mb/sha512_mb_ctx.h b/arch/x86/crypto/sha512-mb/sha512_mb_ctx.h
index 9d4b2c8..e4653f5 100644
--- a/arch/x86/crypto/sha512-mb/sha512_mb_ctx.h
+++ b/arch/x86/crypto/sha512-mb/sha512_mb_ctx.h
@@ -119,7 +119,7 @@ struct sha512_hash_ctx {
/* error flag */
int error;
- uint32_t total_length;
+ uint64_t total_length;
const void *incoming_buffer;
uint32_t incoming_buffer_length;
uint8_t partial_block_buffer[SHA512_BLOCK_SIZE * 2];
--
2.5.5
Re: [PATCH] crypto: sha*-mb Fix total_len for correct hash when larger than 512MB
From: Tim Chen @ 2016-11-15 16:50 UTC
To: Greg Tucker, herbert, linux-crypto; +Cc: megha.dey, xiaodong.liu

On Mon, 2016-11-14 at 16:11 -0700, Greg Tucker wrote:
> The current multi-buffer hash implementations restrict the total length of
> a hash job to 512 MB; hashing a larger buffer produces an incorrect hash.
> Extend the limit to 2^62 - 1 by widening the total length counter from 32
> to 64 bits.
>
> Signed-off-by: Greg Tucker <greg.b.tucker@intel.com>
Acked-by: Tim Chen <tim.c.chen@linux.intel.com>