The aria-avx assembly code accesses members of struct aria_ctx with
magic number offsets. If the layout of struct aria_ctx is changed
carelessly, those offsets become stale and aria-avx silently breaks.
So, the assembly code needs to access the members of aria_ctx with
correctly generated offset values, not with magic numbers.

Add ARIA_CTX_enc_key, ARIA_CTX_dec_key, and ARIA_CTX_rounds to
asm-offsets.c so that correct offset definitions are generated at
build time. The aria-avx assembly code can then access the members
of aria_ctx safely through these definitions.

Signed-off-by: Taehee Yoo <ap420073@xxxxxxxxx>
---
v5:
 - No changes.
v4:
 - Add BUILD_BUG_ON() to check the size of the fields of aria_ctx.
v3:
 - Patch introduced.
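(Background for review, not part of the commit message: OFFSET() is the
standard kbuild helper from include/linux/kbuild.h. It embeds
offsetof(struct aria_ctx, <member>) in an assembler marker string that
Kbuild scans to emit a #define into the generated <asm/asm-offsets.h>.
As a rough sketch, with the current layout of struct aria_ctx the
generated entries are equivalent to the following; the values match the
magic numbers removed below, but are now computed by the compiler
rather than hard-coded:

	/* sketch of the generated <asm/asm-offsets.h> entries */
	#define ARIA_CTX_enc_key 0	/* offsetof(struct aria_ctx, enc_key) */
	#define ARIA_CTX_dec_key 272	/* offsetof(struct aria_ctx, dec_key) */
	#define ARIA_CTX_rounds 544	/* offsetof(struct aria_ctx, rounds) */

If the struct layout ever changes, these definitions regenerate
automatically, and the BUILD_BUG_ON()s added in aria_set_key() catch
any change to the field sizes the assembly code assumes.)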
 arch/x86/crypto/aria-aesni-avx-asm_64.S | 26 +++++++++++---------------
 arch/x86/kernel/asm-offsets.c           | 11 +++++++++++
 crypto/aria_generic.c                   |  4 ++++
 3 files changed, 26 insertions(+), 15 deletions(-)

diff --git a/arch/x86/crypto/aria-aesni-avx-asm_64.S b/arch/x86/crypto/aria-aesni-avx-asm_64.S
index c75fd7d015ed..e47e7e54e08f 100644
--- a/arch/x86/crypto/aria-aesni-avx-asm_64.S
+++ b/arch/x86/crypto/aria-aesni-avx-asm_64.S
@@ -8,11 +8,7 @@

 #include <linux/linkage.h>
 #include <asm/frame.h>
-
-/* struct aria_ctx: */
-#define enc_key 0
-#define dec_key 272
-#define rounds 544
+#include <asm/asm-offsets.h>

 /* register macros */
 #define CTX %rdi
@@ -873,7 +869,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way)
 	aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14,
 		%xmm15, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6,
 		%xmm7, %rax, %r9, 10);
-	cmpl $12, rounds(CTX);
+	cmpl $12, ARIA_CTX_rounds(CTX);
 	jne .Laria_192;
 	aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
 		%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -886,7 +882,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_crypt_16way)
 	aria_fo(%xmm9, %xmm8, %xmm11, %xmm10, %xmm12, %xmm13, %xmm14,
 		%xmm15, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6,
 		%xmm7, %rax, %r9, 12);
-	cmpl $14, rounds(CTX);
+	cmpl $14, ARIA_CTX_rounds(CTX);
 	jne .Laria_256;
 	aria_ff(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
 		%xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -922,7 +918,7 @@ SYM_FUNC_START(aria_aesni_avx_encrypt_16way)

 	FRAME_BEGIN

-	leaq enc_key(CTX), %r9;
+	leaq ARIA_CTX_enc_key(CTX), %r9;

 	inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -947,7 +943,7 @@ SYM_FUNC_START(aria_aesni_avx_decrypt_16way)

 	FRAME_BEGIN

-	leaq dec_key(CTX), %r9;
+	leaq ARIA_CTX_dec_key(CTX), %r9;

 	inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -1055,7 +1051,7 @@ SYM_FUNC_START(aria_aesni_avx_ctr_crypt_16way)
 	leaq (%rdx), %r11;
 	leaq (%rcx), %rsi;
 	leaq (%rcx), %rdx;
-	leaq enc_key(CTX), %r9;
+	leaq ARIA_CTX_enc_key(CTX), %r9;

 	call __aria_aesni_avx_crypt_16way;

@@ -1156,7 +1152,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way)
 		     %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %rax, %r9, 10);

-	cmpl $12, rounds(CTX);
+	cmpl $12, ARIA_CTX_rounds(CTX);
 	jne .Laria_gfni_192;
 	aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -1173,7 +1169,7 @@ SYM_FUNC_START_LOCAL(__aria_aesni_avx_gfni_crypt_16way)
 		     %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %rax, %r9, 12);

-	cmpl $14, rounds(CTX);
+	cmpl $14, ARIA_CTX_rounds(CTX);
 	jne .Laria_gfni_256;

 	aria_ff_gfni(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
@@ -1217,7 +1213,7 @@ SYM_FUNC_START(aria_aesni_avx_gfni_encrypt_16way)

 	FRAME_BEGIN

-	leaq enc_key(CTX), %r9;
+	leaq ARIA_CTX_enc_key(CTX), %r9;

 	inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -1242,7 +1238,7 @@ SYM_FUNC_START(aria_aesni_avx_gfni_decrypt_16way)

 	FRAME_BEGIN

-	leaq dec_key(CTX), %r9;
+	leaq ARIA_CTX_dec_key(CTX), %r9;

 	inpack16_pre(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
 		     %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14,
@@ -1274,7 +1270,7 @@ SYM_FUNC_START(aria_aesni_avx_gfni_ctr_crypt_16way)
 	leaq (%rdx), %r11;
 	leaq (%rcx), %rsi;
 	leaq (%rcx), %rdx;
-	leaq enc_key(CTX), %r9;
+	leaq ARIA_CTX_enc_key(CTX), %r9;

 	call __aria_aesni_avx_gfni_crypt_16way;

diff --git a/arch/x86/kernel/asm-offsets.c b/arch/x86/kernel/asm-offsets.c
index cb50589a7102..32192a91c65b 100644
--- a/arch/x86/kernel/asm-offsets.c
+++ b/arch/x86/kernel/asm-offsets.c
@@ -7,6 +7,7 @@
 #define COMPILE_OFFSETS

 #include <linux/crypto.h>
+#include <crypto/aria.h>
 #include <linux/sched.h>
 #include <linux/stddef.h>
 #include <linux/hardirq.h>
@@ -109,6 +110,16 @@ static void __used common(void)
 	OFFSET(TSS_sp1, tss_struct, x86_tss.sp1);
 	OFFSET(TSS_sp2, tss_struct, x86_tss.sp2);

+#if defined(CONFIG_CRYPTO_ARIA_AESNI_AVX_X86_64) || \
+    defined(CONFIG_CRYPTO_ARIA_AESNI_AVX_X86_64_MODULE)
+
+	/* Offset for fields in aria_ctx */
+	BLANK();
+	OFFSET(ARIA_CTX_enc_key, aria_ctx, enc_key);
+	OFFSET(ARIA_CTX_dec_key, aria_ctx, dec_key);
+	OFFSET(ARIA_CTX_rounds, aria_ctx, rounds);
+#endif
+
 	if (IS_ENABLED(CONFIG_KVM_INTEL)) {
 		BLANK();
 		OFFSET(VMX_spec_ctrl, vcpu_vmx, spec_ctrl);
diff --git a/crypto/aria_generic.c b/crypto/aria_generic.c
index 4cc29b82b99d..d96dfc4fdde6 100644
--- a/crypto/aria_generic.c
+++ b/crypto/aria_generic.c
@@ -178,6 +178,10 @@ int aria_set_key(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len)
 	if (key_len != 16 && key_len != 24 && key_len != 32)
 		return -EINVAL;

+	BUILD_BUG_ON(sizeof(ctx->enc_key) != 272);
+	BUILD_BUG_ON(sizeof(ctx->dec_key) != 272);
+	BUILD_BUG_ON(sizeof(int) != sizeof(ctx->rounds));
+
 	ctx->key_length = key_len;
 	ctx->rounds = (key_len + 32) / 4;
-- 
2.17.1