diff --git a/crypto/aes/asm/aes-riscv64-zvkned.pl b/crypto/aes/asm/aes-riscv64-zvkned.pl
index 2586a71af1..e0f5f19ff5 100644
--- a/crypto/aes/asm/aes-riscv64-zvkned.pl
+++ b/crypto/aes/asm/aes-riscv64-zvkned.pl
@@ -263,6 +263,8 @@ rv64i_zvkned_encrypt:
     beq $rounds, $T6, L_enc_256
     li $T6, 10
     beq $rounds, $T6, L_enc_128
+    li $T6, 12
+    beq $rounds, $T6, L_enc_192
 
     j L_fail_m2
 .size rv64i_zvkned_encrypt,.-rv64i_zvkned_encrypt
@@ -315,6 +317,58 @@ L_enc_128:
 .size L_enc_128,.-L_enc_128
 ___
 
+$code .= <<___;
+.p2align 3
+L_enc_192:
+    @{[vsetivli__x0_4_e32_m1_tu_mu]}
+
+    @{[vle32_v $v10, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v11, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v12, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v13, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v14, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v15, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v16, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v17, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v18, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v19, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v20, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v21, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v22, ($KEYP)]}
+
+    @{[vle32_v $v1, ($INP)]}
+
+    @{[vaesz_vs $v1, $v10]}    # with round key w[ 0, 3]
+    @{[vaesem_vs $v1, $v11]}
+    @{[vaesem_vs $v1, $v12]}
+    @{[vaesem_vs $v1, $v13]}
+    @{[vaesem_vs $v1, $v14]}
+    @{[vaesem_vs $v1, $v15]}
+    @{[vaesem_vs $v1, $v16]}
+    @{[vaesem_vs $v1, $v17]}
+    @{[vaesem_vs $v1, $v18]}
+    @{[vaesem_vs $v1, $v19]}
+    @{[vaesem_vs $v1, $v20]}
+    @{[vaesem_vs $v1, $v21]}
+    @{[vaesef_vs $v1, $v22]}
+
+    @{[vse32_v $v1, ($OUTP)]}
+    ret
+.size L_enc_192,.-L_enc_192
+___
+
 $code .= <<___;
 .p2align 3
 L_enc_256:
@@ -399,6 +453,8 @@ rv64i_zvkned_decrypt:
     beq $rounds, $T6, L_dec_256
     li $T6, 10
     beq $rounds, $T6, L_dec_128
+    li $T6, 12
+    beq $rounds, $T6, L_dec_192
 
     j L_fail_m2
 .size rv64i_zvkned_decrypt,.-rv64i_zvkned_decrypt
@@ -451,6 +507,59 @@ L_dec_128:
 .size L_dec_128,.-L_dec_128
 ___
 
+$code .= <<___;
+.p2align 3
+L_dec_192:
+    @{[vsetivli__x0_4_e32_m1_tu_mu]}
+
+    @{[vle32_v $v10, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v11, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v12, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v13, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v14, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v15, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v16, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v17, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v18, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v19, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v20, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v21, ($KEYP)]}
+    addi $KEYP, $KEYP, 16
+    @{[vle32_v $v22, ($KEYP)]}
+
+    @{[vle32_v $v1, ($INP)]}
+
+    @{[vaesz_vs $v1, $v22]}    # with round key w[48,51]
+    @{[vaesdm_vs $v1, $v21]}   # with round key w[44,47]
+    @{[vaesdm_vs $v1, $v20]}   # with round key w[40,43]
+    @{[vaesdm_vs $v1, $v19]}   # with round key w[36,39]
+    @{[vaesdm_vs $v1, $v18]}   # with round key w[32,35]
+    @{[vaesdm_vs $v1, $v17]}   # with round key w[28,31]
+    @{[vaesdm_vs $v1, $v16]}   # with round key w[24,27]
+    @{[vaesdm_vs $v1, $v15]}   # with round key w[20,23]
+    @{[vaesdm_vs $v1, $v14]}   # with round key w[16,19]
+    @{[vaesdm_vs $v1, $v13]}   # with round key w[12,15]
+    @{[vaesdm_vs $v1, $v12]}   # with round key w[ 8,11]
+    @{[vaesdm_vs $v1, $v11]}   # with round key w[ 4, 7]
+    @{[vaesdf_vs $v1, $v10]}   # with round key w[ 0, 3]
+
+    @{[vse32_v $v1, ($OUTP)]}
+
+    ret
+.size L_dec_192,.-L_dec_192
+___
+
 $code .= <<___;
 .p2align 3
 L_dec_256:
diff --git a/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
index 28efc21853..f2353bb3b8 100644
--- a/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_ccm_hw_rv64i.inc
@@ -41,12 +41,12 @@ static int ccm_rv64i_zvkned_initkey(PROV_CCM_CTX *ctx, const unsigned char *key,
 {
     PROV_AES_CCM_CTX *actx = (PROV_AES_CCM_CTX *)ctx;
 
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         AES_HW_CCM_SET_KEY_FN(rv64i_zvkned_set_encrypt_key, rv64i_zvkned_encrypt,
                               NULL, NULL);
     } else {
-        AES_HW_CCM_SET_KEY_FN(AES_set_encrypt_key, AES_encrypt, NULL, NULL)
+        AES_HW_CCM_SET_KEY_FN(AES_set_encrypt_key, rv64i_zvkned_encrypt, NULL, NULL)
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
index f4665b89e6..b4a6749d3f 100644
--- a/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_gcm_hw_rv64i.inc
@@ -41,12 +41,12 @@ static int rv64i_zvkned_gcm_initkey(PROV_GCM_CTX *ctx, const unsigned char *key,
 {
     PROV_AES_GCM_CTX *actx = (PROV_AES_GCM_CTX *)ctx;
     AES_KEY *ks = &actx->ks.ks;
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         GCM_HW_SET_KEY_CTR_FN(ks, rv64i_zvkned_set_encrypt_key,
                               rv64i_zvkned_encrypt, NULL);
     } else {
-        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, AES_encrypt, NULL);
+        GCM_HW_SET_KEY_CTR_FN(ks, AES_set_encrypt_key, rv64i_zvkned_encrypt, NULL);
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc b/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
index 8deaf01be6..2761905ee0 100644
--- a/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
+++ b/providers/implementations/ciphers/cipher_aes_hw_rv64i.inc
@@ -71,7 +71,7 @@ static int cipher_hw_rv64i_zvkned_initkey(PROV_CIPHER_CTX *dat,
 
     dat->ks = ks;
 
-    /* Zvkned only supports 128 and 256 bit keys. */
+    /* Zvkned only supports 128 and 256 bit keys for key schedule generation. */
     if (keylen * 8 == 128 || keylen * 8 == 256) {
         if ((dat->mode == EVP_CIPH_ECB_MODE || dat->mode == EVP_CIPH_CBC_MODE)
             && !dat->enc) {
@@ -86,15 +86,13 @@ static int cipher_hw_rv64i_zvkned_initkey(PROV_CIPHER_CTX *dat,
     } else {
         if ((dat->mode == EVP_CIPH_ECB_MODE || dat->mode == EVP_CIPH_CBC_MODE)
             && !dat->enc) {
-            ret = AES_set_decrypt_key(key, keylen * 8, ks);
-            dat->block = (block128_f)AES_decrypt;
-            dat->stream.cbc = (dat->mode == EVP_CIPH_CBC_MODE)
-                ? (cbc128_f)AES_cbc_encrypt : NULL;
+            ret = AES_set_encrypt_key(key, keylen * 8, ks);
+            dat->block = (block128_f) rv64i_zvkned_decrypt;
+            dat->stream.cbc = NULL;
         } else {
             ret = AES_set_encrypt_key(key, keylen * 8, ks);
-            dat->block = (block128_f)AES_encrypt;
-            dat->stream.cbc = (dat->mode == EVP_CIPH_CBC_MODE)
-                ? (cbc128_f)AES_cbc_encrypt : NULL;
+            dat->block = (block128_f) rv64i_zvkned_encrypt;
+            dat->stream.cbc = NULL;
         }
     }
 
diff --git a/providers/implementations/ciphers/cipher_aes_ocb_hw.c b/providers/implementations/ciphers/cipher_aes_ocb_hw.c
index 5e90febe54..7ece4d32c9 100644
--- a/providers/implementations/ciphers/cipher_aes_ocb_hw.c
+++ b/providers/implementations/ciphers/cipher_aes_ocb_hw.c
@@ -130,8 +130,9 @@ static int cipher_hw_aes_ocb_rv64i_zvkned_initkey(PROV_CIPHER_CTX *vctx,
                        rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        NULL, NULL);
     } else {
-        OCB_SET_KEY_FN(AES_set_encrypt_key, AES_set_decrypt_key,
-                       AES_encrypt, AES_decrypt, NULL, NULL);
+        OCB_SET_KEY_FN(AES_set_encrypt_key, AES_set_encrypt_key,
+                       rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
+                       NULL, NULL);
     }
     return 1;
 }
diff --git a/providers/implementations/ciphers/cipher_aes_xts_hw.c b/providers/implementations/ciphers/cipher_aes_xts_hw.c
index 45e6c90090..e5ee6741ea 100644
--- a/providers/implementations/ciphers/cipher_aes_xts_hw.c
+++ b/providers/implementations/ciphers/cipher_aes_xts_hw.c
@@ -190,8 +190,8 @@ static int cipher_hw_aes_xts_rv64i_zvkned_initkey(PROV_CIPHER_CTX *ctx,
                        rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        stream_enc, stream_dec);
     } else {
-        XTS_SET_KEY_FN(AES_set_encrypt_key, AES_set_decrypt_key,
-                       AES_encrypt, AES_decrypt,
+        XTS_SET_KEY_FN(AES_set_encrypt_key, AES_set_encrypt_key,
+                       rv64i_zvkned_encrypt, rv64i_zvkned_decrypt,
                        stream_enc, stream_dec);
     }
     return 1;
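Note: every provider change above follows the same shape. For key lengths whose
schedule Zvkned cannot generate itself (i.e. 192-bit keys), the key expansion
falls back to the portable AES_set_encrypt_key, while per-block work still goes
through rv64i_zvkned_encrypt / rv64i_zvkned_decrypt, which the new L_enc_192 /
L_dec_192 routines make valid for 12-round keys. A minimal standalone sketch of
that dispatch follows; the helper name zvkned_init_enc is hypothetical (the real
code goes through the *_SET_KEY_FN macros), and the prototypes for the assembly
routines are assumed to mirror the portable AES_* ones, as the macros require.

#include <openssl/aes.h>

/*
 * Illustrative only -- not part of the patch. Assumed prototypes for the
 * Zvkned assembly routines, matching the portable AES_* signatures.
 */
int rv64i_zvkned_set_encrypt_key(const unsigned char *key, const int bits,
                                 AES_KEY *ks);
void rv64i_zvkned_encrypt(const unsigned char *in, unsigned char *out,
                          const AES_KEY *ks);

typedef void (*block128_fn)(const unsigned char *in, unsigned char *out,
                            const AES_KEY *ks);

/* Hypothetical helper mirroring the dispatch done by the provider changes. */
static int zvkned_init_enc(const unsigned char *key, int keylen_bits,
                           AES_KEY *ks, block128_fn *block)
{
    int ret;

    if (keylen_bits == 128 || keylen_bits == 256) {
        /* Zvkned can generate the key schedule for these sizes itself. */
        ret = rv64i_zvkned_set_encrypt_key(key, keylen_bits, ks);
    } else {
        /* 192-bit keys: expand with the portable C key schedule ... */
        ret = AES_set_encrypt_key(key, keylen_bits, ks);
    }
    /*
     * ... but always run the block operation in the vector unit; the new
     * L_enc_192 path handles the 12-round case as well.
     */
    *block = rv64i_zvkned_encrypt;
    return ret;
}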