* [PATCH 1/4] crypto: arm64/aes-neonbs - use frame_push/pop consistently
2022-11-29 16:48 [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Ard Biesheuvel
@ 2022-11-29 16:48 ` Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 2/4] crypto: arm64/aes-modes - use frame_push/pop macros consistently Ard Biesheuvel
` (3 subsequent siblings)
4 siblings, 0 replies; 6+ messages in thread
From: Ard Biesheuvel @ 2022-11-29 16:48 UTC (permalink / raw)
To: linux-crypto
Cc: linux-arm-kernel, herbert, keescook, ebiggers, Ard Biesheuvel
Use the frame_push and frame_pop macros consistently to create the stack
frame, so that we will get PAC and/or shadow call stack handling as well
when enabled.
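As a reminder of what these macros provide, here is a simplified sketch
(illustrative only: the real definitions live in
arch/arm64/include/asm/assembler.h, can additionally preserve up to ten
callee-saved GPRs, and are where the PAC/shadow call stack handling
mentioned above will be hooked in; the _sketch names are made up for
this example):

        // frame_push <regcount>, <extra>: reserve a frame holding the
        // x29/x30 frame record, <regcount> callee-saved GPRs (rounded
        // up to a register pair) and <extra> bytes of local storage,
        // reachable at [sp, #.Lframe_local_offset]
        .macro  frame_push_sketch, regcount:req, extra=0
        .set    .Lframe_local_offset, ((\regcount + 3) / 2) * 16
        .set    .Lframe_size, .Lframe_local_offset + \extra
        stp     x29, x30, [sp, #-.Lframe_size]! // push frame record
        mov     x29, sp                         // establish frame pointer
        .endm

        // frame_pop: undo the matching frame_push
        .macro  frame_pop_sketch
        ldp     x29, x30, [sp], #.Lframe_size
        .endm

This also explains the [sp, #16] -> [x6] change below: __xts_crypt8 is
a separate local routine, assembled outside the frame_push/frame_pop
pair where .Lframe_local_offset has no meaningful value, so the caller
computes the address of its local storage area into x6 and passes it in
instead.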
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
---
arch/arm64/crypto/aes-neonbs-core.S | 16 +++++++---------
1 file changed, 7 insertions(+), 9 deletions(-)
diff --git a/arch/arm64/crypto/aes-neonbs-core.S b/arch/arm64/crypto/aes-neonbs-core.S
index 66eb3f076fa7a6cf..e4da53b3f189e2bd 100644
--- a/arch/arm64/crypto/aes-neonbs-core.S
+++ b/arch/arm64/crypto/aes-neonbs-core.S
@@ -772,7 +772,7 @@ SYM_FUNC_START_LOCAL(__xts_crypt8)
eor v6.16b, v6.16b, v31.16b
eor v7.16b, v7.16b, v16.16b
- stp q16, q17, [sp, #16]
+ stp q16, q17, [x6]
mov bskey, x2
mov rounds, x3
@@ -780,8 +780,8 @@ SYM_FUNC_START_LOCAL(__xts_crypt8)
SYM_FUNC_END(__xts_crypt8)
.macro __xts_crypt, do8, o0, o1, o2, o3, o4, o5, o6, o7
- stp x29, x30, [sp, #-48]!
- mov x29, sp
+ frame_push 0, 32
+ add x6, sp, #.Lframe_local_offset
ld1 {v25.16b}, [x5]
@@ -793,7 +793,7 @@ SYM_FUNC_END(__xts_crypt8)
eor v18.16b, \o2\().16b, v27.16b
eor v19.16b, \o3\().16b, v28.16b
- ldp q24, q25, [sp, #16]
+ ldp q24, q25, [x6]
eor v20.16b, \o4\().16b, v29.16b
eor v21.16b, \o5\().16b, v30.16b
@@ -807,7 +807,7 @@ SYM_FUNC_END(__xts_crypt8)
b.gt 0b
st1 {v25.16b}, [x5]
- ldp x29, x30, [sp], #48
+ frame_pop
ret
.endm
@@ -832,9 +832,7 @@ SYM_FUNC_END(aesbs_xts_decrypt)
* int rounds, int blocks, u8 iv[])
*/
SYM_FUNC_START(aesbs_ctr_encrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
-
+ frame_push 0
ldp x7, x8, [x5]
ld1 {v0.16b}, [x5]
CPU_LE( rev x7, x7 )
@@ -874,6 +872,6 @@ CPU_LE( rev x8, x8 )
b.gt 0b
st1 {v0.16b}, [x5]
- ldp x29, x30, [sp], #16
+ frame_pop
ret
SYM_FUNC_END(aesbs_ctr_encrypt)
--
2.35.1
* [PATCH 2/4] crypto: arm64/aes-modes - use frame_push/pop macros consistently
2022-11-29 16:48 [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 1/4] crypto: arm64/aes-neonbs - use frame_push/pop consistently Ard Biesheuvel
@ 2022-11-29 16:48 ` Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 3/4] crypto: arm64/crct10dif " Ard Biesheuvel
` (2 subsequent siblings)
4 siblings, 0 replies; 6+ messages in thread
From: Ard Biesheuvel @ 2022-11-29 16:48 UTC (permalink / raw)
To: linux-crypto
Cc: linux-arm-kernel, herbert, keescook, ebiggers, Ard Biesheuvel
Use the frame_push and frame_pop macros to create the stack frames in
the AES chaining mode wrappers so that they will get PAC and/or shadow
call stack protection when configured.
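The resulting prologue/epilogue pattern, repeated throughout the hunks
below, boils down to the following (hypothetical function names, shown
only to illustrate the shape of the change):

        SYM_FUNC_START(example_nonleaf)
        frame_push      0               // no extra GPRs, no local storage
        bl      example_helper          // bl clobbers x30, hence the frame
        frame_pop
        ret
        SYM_FUNC_END(example_nonleaf)

One wrinkle: aes_cbc_decrypt and aes_essiv_cbc_decrypt share their body
via the .Lessivcbcdecstart label, so the single frame_push is placed on
the shared path right after that label rather than duplicated in both
entry points.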
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
---
arch/arm64/crypto/aes-modes.S | 34 +++++++-------------
1 file changed, 12 insertions(+), 22 deletions(-)
diff --git a/arch/arm64/crypto/aes-modes.S b/arch/arm64/crypto/aes-modes.S
index 5abc834271f4a610..0e834a2c062cf265 100644
--- a/arch/arm64/crypto/aes-modes.S
+++ b/arch/arm64/crypto/aes-modes.S
@@ -52,8 +52,7 @@ SYM_FUNC_END(aes_decrypt_block5x)
*/
AES_FUNC_START(aes_ecb_encrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
enc_prepare w3, x2, x5
@@ -77,14 +76,13 @@ ST5( st1 {v4.16b}, [x0], #16 )
subs w4, w4, #1
bne .Lecbencloop
.Lecbencout:
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_ecb_encrypt)
AES_FUNC_START(aes_ecb_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
dec_prepare w3, x2, x5
@@ -108,7 +106,7 @@ ST5( st1 {v4.16b}, [x0], #16 )
subs w4, w4, #1
bne .Lecbdecloop
.Lecbdecout:
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_ecb_decrypt)
@@ -171,9 +169,6 @@ AES_FUNC_END(aes_cbc_encrypt)
AES_FUNC_END(aes_essiv_cbc_encrypt)
AES_FUNC_START(aes_essiv_cbc_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
-
ld1 {cbciv.16b}, [x5] /* get iv */
mov w8, #14 /* AES-256: 14 rounds */
@@ -182,11 +177,9 @@ AES_FUNC_START(aes_essiv_cbc_decrypt)
b .Lessivcbcdecstart
AES_FUNC_START(aes_cbc_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
-
ld1 {cbciv.16b}, [x5] /* get iv */
.Lessivcbcdecstart:
+ frame_push 0
dec_prepare w3, x2, x6
.LcbcdecloopNx:
@@ -236,7 +229,7 @@ ST5( st1 {v4.16b}, [x0], #16 )
bne .Lcbcdecloop
.Lcbcdecout:
st1 {cbciv.16b}, [x5] /* return iv */
- ldp x29, x30, [sp], #16
+ frame_pop
ret
AES_FUNC_END(aes_cbc_decrypt)
AES_FUNC_END(aes_essiv_cbc_decrypt)
@@ -337,8 +330,7 @@ AES_FUNC_END(aes_cbc_cts_decrypt)
BLOCKS .req x13
BLOCKS_W .req w13
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
enc_prepare ROUNDS_W, KEY, IV_PART
ld1 {vctr.16b}, [IV]
@@ -481,7 +473,7 @@ ST5( st1 {v4.16b}, [OUT], #16 )
.if !\xctr
st1 {vctr.16b}, [IV] /* return next CTR value */
.endif
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.Lctrtail\xctr:
@@ -645,8 +637,7 @@ AES_FUNC_END(aes_xctr_encrypt)
.endm
AES_FUNC_START(aes_xts_encrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
ld1 {v4.16b}, [x6]
xts_load_mask v8
@@ -704,7 +695,7 @@ AES_FUNC_START(aes_xts_encrypt)
st1 {v0.16b}, [x0]
.Lxtsencret:
st1 {v4.16b}, [x6]
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.LxtsencctsNx:
@@ -732,8 +723,7 @@ AES_FUNC_START(aes_xts_encrypt)
AES_FUNC_END(aes_xts_encrypt)
AES_FUNC_START(aes_xts_decrypt)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 0
/* subtract 16 bytes if we are doing CTS */
sub w8, w4, #0x10
@@ -794,7 +784,7 @@ AES_FUNC_START(aes_xts_decrypt)
b .Lxtsdecloop
.Lxtsdecout:
st1 {v4.16b}, [x6]
- ldp x29, x30, [sp], #16
+ frame_pop
ret
.Lxtsdeccts:
--
2.35.1
* [PATCH 3/4] crypto: arm64/crct10dif - use frame_push/pop macros consistently
2022-11-29 16:48 [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 1/4] crypto: arm64/aes-neonbs - use frame_push/pop consistently Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 2/4] crypto: arm64/aes-modes - use frame_push/pop macros consistently Ard Biesheuvel
@ 2022-11-29 16:48 ` Ard Biesheuvel
2022-11-29 16:48 ` [PATCH 4/4] crypto: arm64/ghash-ce " Ard Biesheuvel
2022-12-09 11:07 ` [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Herbert Xu
4 siblings, 0 replies; 6+ messages in thread
From: Ard Biesheuvel @ 2022-11-29 16:48 UTC (permalink / raw)
To: linux-crypto
Cc: linux-arm-kernel, herbert, keescook, ebiggers, Ard Biesheuvel
Use the frame_push and frame_pop macros to set up the stack frame so
that return address protections will be enabled automatically when
configured.
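Here the register count passed to frame_push is 1, so on top of the
frame record one callee-saved GPR is preserved as well. Under the
sketch semantics from patch 1, the expansion is roughly the following
(illustrative; exact offsets are up to the real macro):

        // frame_push 1 (sketch): 32-byte frame, x19 preserved
        stp     x29, x30, [sp, #-32]!
        mov     x29, sp
        str     x19, [sp, #16]

        // frame_pop (sketch)
        ldr     x19, [sp, #16]
        ldp     x29, x30, [sp], #32

Note that the matching frame_pop lives in the shared crc_t10dif_pmull
macro, guarded by .ifc \p, p8, since only the p8 entry point pushes a
frame.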
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
---
arch/arm64/crypto/crct10dif-ce-core.S | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/arch/arm64/crypto/crct10dif-ce-core.S b/arch/arm64/crypto/crct10dif-ce-core.S
index dce6dcebfca189ee..5604de61d06d04ee 100644
--- a/arch/arm64/crypto/crct10dif-ce-core.S
+++ b/arch/arm64/crypto/crct10dif-ce-core.S
@@ -429,7 +429,7 @@ CPU_LE( ext v0.16b, v0.16b, v0.16b, #8 )
umov w0, v0.h[0]
.ifc \p, p8
- ldp x29, x30, [sp], #16
+ frame_pop
.endif
ret
@@ -466,8 +466,7 @@ CPU_LE( ext v7.16b, v7.16b, v7.16b, #8 )
// Assumes len >= 16.
//
SYM_FUNC_START(crc_t10dif_pmull_p8)
- stp x29, x30, [sp, #-16]!
- mov x29, sp
+ frame_push 1
crc_t10dif_pmull p8
SYM_FUNC_END(crc_t10dif_pmull_p8)
--
2.35.1
* [PATCH 4/4] crypto: arm64/ghash-ce - use frame_push/pop macros consistently
2022-11-29 16:48 [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Ard Biesheuvel
` (2 preceding siblings ...)
2022-11-29 16:48 ` [PATCH 3/4] crypto: arm64/crct10dif " Ard Biesheuvel
@ 2022-11-29 16:48 ` Ard Biesheuvel
2022-12-09 11:07 ` [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Herbert Xu
4 siblings, 0 replies; 6+ messages in thread
From: Ard Biesheuvel @ 2022-11-29 16:48 UTC (permalink / raw)
To: linux-crypto
Cc: linux-arm-kernel, herbert, keescook, ebiggers, Ard Biesheuvel
Use the frame_push and frame_pop macros to set up the stack frame so
that return address protections will be enabled automatically when
configured.
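The ldr at label 3: is worth a closer look: with a register count of 1
and no extra local storage, .Lframe_local_offset equals the frame size,
so [sp, #.Lframe_local_offset] is the first slot above the new frame,
i.e. the stack-passed argument that the old code fetched as the second
register of ldp x19, x10, [sp, #24]. As a sketch (hypothetical function
name, layout per the simplified semantics from patch 1):

        SYM_FUNC_START(stack_arg_sketch)
        frame_push      1               // 32 bytes: x29/x30 + x19 slot
        ldr     x10, [sp, #.Lframe_local_offset] // == [sp, #32]:
                                                 // caller's 1st stack arg
        frame_pop
        ret
        SYM_FUNC_END(stack_arg_sketch)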
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
---
arch/arm64/crypto/ghash-ce-core.S | 8 +++-----
1 file changed, 3 insertions(+), 5 deletions(-)
diff --git a/arch/arm64/crypto/ghash-ce-core.S b/arch/arm64/crypto/ghash-ce-core.S
index ebe5558929b7bba6..23ee9a5eaf27c23c 100644
--- a/arch/arm64/crypto/ghash-ce-core.S
+++ b/arch/arm64/crypto/ghash-ce-core.S
@@ -436,9 +436,7 @@ SYM_FUNC_END(pmull_ghash_update_p8)
.align 6
.macro pmull_gcm_do_crypt, enc
- stp x29, x30, [sp, #-32]!
- mov x29, sp
- str x19, [sp, #24]
+ frame_push 1
load_round_keys x7, x6, x8
@@ -529,7 +527,7 @@ CPU_LE( rev w8, w8 )
.endif
bne 0b
-3: ldp x19, x10, [sp, #24]
+3: ldr x10, [sp, #.Lframe_local_offset]
cbz x10, 5f // output tag?
ld1 {INP3.16b}, [x10] // load lengths[]
@@ -562,7 +560,7 @@ CPU_LE( rev w8, w8 )
smov w0, v0.b[0] // return b0
.endif
-4: ldp x29, x30, [sp], #32
+4: frame_pop
ret
5:
--
2.35.1
* Re: [PATCH 0/4] crypto: arm64 - use frame_push/pop macros
2022-11-29 16:48 [PATCH 0/4] crypto: arm64 - use frame_push/pop macros Ard Biesheuvel
` (3 preceding siblings ...)
2022-11-29 16:48 ` [PATCH 4/4] crypto: arm64/ghash-ce " Ard Biesheuvel
@ 2022-12-09 11:07 ` Herbert Xu
4 siblings, 0 replies; 6+ messages in thread
From: Herbert Xu @ 2022-12-09 11:07 UTC (permalink / raw)
To: Ard Biesheuvel; +Cc: linux-crypto, linux-arm-kernel, keescook, ebiggers
On Tue, Nov 29, 2022 at 05:48:48PM +0100, Ard Biesheuvel wrote:
> We have a pair of macros on arm64 that can be used in asm code to set up
> and tear down the stack frame when implementing a non-leaf function.
>
> We will be adding support for shadow call stack and pointer
> authentication to those macros, so that the code in question is less
> likely to be abused for someone's ROP/JOP enjoyment. So let's fix the
> existing crypto code to use those macros where they should be used.
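For context, the protections being referred to amount to per-function
additions along these lines; a rough sketch only, since the actual
macro changes land in a separate series and their exact shape may
differ:

        paciasp                         // PAC: sign x30 against sp on entry
#ifdef CONFIG_SHADOW_CALL_STACK
        str     x30, [x18], #8          // SCS: push x30 to the shadow stack
#endif

        // ... function body, possibly clobbering x30 via bl ...

#ifdef CONFIG_SHADOW_CALL_STACK
        ldr     x30, [x18, #-8]!        // SCS: pop x30 back
#endif
        autiasp                         // PAC: authenticate x30 before ret
        ret

Either way, a return address tampered with on the ordinary stack no
longer goes unnoticed: autiasp faults on a bad signature, and the
shadow call stack keeps the live copy of x30 in memory that ordinary
stack overflows cannot reach.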
>
> Ard Biesheuvel (4):
> crypto: arm64/aes-neonbs - use frame_push/pop consistently
> crypto: arm64/aes-modes - use frame_push/pop macros consistently
> crypto: arm64/crct10dif - use frame_push/pop macros consistently
> crypto: arm64/ghash-ce - use frame_push/pop macros consistently
>
> arch/arm64/crypto/aes-modes.S | 34 +++++++-------------
> arch/arm64/crypto/aes-neonbs-core.S | 16 ++++-----
> arch/arm64/crypto/crct10dif-ce-core.S | 5 ++-
> arch/arm64/crypto/ghash-ce-core.S | 8 ++---
> 4 files changed, 24 insertions(+), 39 deletions(-)
>
> --
> 2.35.1
All applied. Thanks.
--
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt