2019-05-29 17:18:13 +03:00
// SPDX-License-Identifier: GPL-2.0-only
2021-03-20 23:25:25 +03:00
/*
2015-02-06 19:57:53 +03:00
* AES CBC routines supporting VMX instructions on the Power 8
*
* Copyright ( C ) 2015 International Business Machines Inc .
*
* Author : Marcelo Henrique Cerri < mhcerri @ br . ibm . com >
*/
2019-04-13 08:33:12 +03:00
# include <asm/simd.h>
2015-02-06 19:57:53 +03:00
# include <asm/switch_to.h>
# include <crypto/aes.h>
2019-04-13 08:33:12 +03:00
# include <crypto/internal/simd.h>
2019-05-20 19:44:48 +03:00
# include <crypto/internal/skcipher.h>
2015-02-06 19:57:53 +03:00
# include "aesp8-ppc.h"
struct p8_aes_cbc_ctx {
2019-05-20 19:44:48 +03:00
struct crypto_skcipher * fallback ;
2015-06-15 11:55:46 +03:00
struct aes_key enc_key ;
struct aes_key dec_key ;
2015-02-06 19:57:53 +03:00
} ;
2019-05-20 19:44:48 +03:00
static int p8_aes_cbc_init ( struct crypto_skcipher * tfm )
2015-02-06 19:57:53 +03:00
{
2019-05-20 19:44:48 +03:00
struct p8_aes_cbc_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
struct crypto_skcipher * fallback ;
2017-03-01 16:58:20 +03:00
2019-05-20 19:44:48 +03:00
fallback = crypto_alloc_skcipher ( " cbc(aes) " , 0 ,
CRYPTO_ALG_NEED_FALLBACK |
CRYPTO_ALG_ASYNC ) ;
2015-06-15 11:55:46 +03:00
if ( IS_ERR ( fallback ) ) {
2019-05-20 19:44:48 +03:00
pr_err ( " Failed to allocate cbc(aes) fallback: %ld \n " ,
PTR_ERR ( fallback ) ) ;
2015-06-15 11:55:46 +03:00
return PTR_ERR ( fallback ) ;
}
2019-05-20 19:44:48 +03:00
crypto_skcipher_set_reqsize ( tfm , sizeof ( struct skcipher_request ) +
crypto_skcipher_reqsize ( fallback ) ) ;
2015-06-15 11:55:46 +03:00
ctx - > fallback = fallback ;
return 0 ;
2015-02-06 19:57:53 +03:00
}
2019-05-20 19:44:48 +03:00
static void p8_aes_cbc_exit ( struct crypto_skcipher * tfm )
2015-02-06 19:57:53 +03:00
{
2019-05-20 19:44:48 +03:00
struct p8_aes_cbc_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
2015-02-06 19:57:53 +03:00
2019-05-20 19:44:48 +03:00
crypto_free_skcipher ( ctx - > fallback ) ;
2015-02-06 19:57:53 +03:00
}
2019-05-20 19:44:48 +03:00
static int p8_aes_cbc_setkey ( struct crypto_skcipher * tfm , const u8 * key ,
2015-06-15 11:55:46 +03:00
unsigned int keylen )
2015-02-06 19:57:53 +03:00
{
2019-05-20 19:44:48 +03:00
struct p8_aes_cbc_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
2015-06-15 11:55:46 +03:00
int ret ;
2015-02-06 19:57:53 +03:00
2015-06-23 07:04:48 +03:00
preempt_disable ( ) ;
2015-06-15 11:55:46 +03:00
pagefault_disable ( ) ;
2015-07-13 19:51:39 +03:00
enable_kernel_vsx ( ) ;
2015-06-15 11:55:46 +03:00
ret = aes_p8_set_encrypt_key ( key , keylen * 8 , & ctx - > enc_key ) ;
2019-04-10 09:46:35 +03:00
ret | = aes_p8_set_decrypt_key ( key , keylen * 8 , & ctx - > dec_key ) ;
2015-10-29 03:44:05 +03:00
disable_kernel_vsx ( ) ;
2015-06-15 11:55:46 +03:00
pagefault_enable ( ) ;
2015-06-23 07:04:48 +03:00
preempt_enable ( ) ;
2015-02-06 19:57:53 +03:00
2019-05-20 19:44:48 +03:00
ret | = crypto_skcipher_setkey ( ctx - > fallback , key , keylen ) ;
2019-04-10 09:46:35 +03:00
return ret ? - EINVAL : 0 ;
2015-02-06 19:57:53 +03:00
}
2019-05-20 19:44:48 +03:00
static int p8_aes_cbc_crypt ( struct skcipher_request * req , int enc )
2015-02-06 19:57:53 +03:00
{
2019-05-20 19:44:48 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
const struct p8_aes_cbc_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
struct skcipher_walk walk ;
unsigned int nbytes ;
2015-06-15 11:55:46 +03:00
int ret ;
2019-04-13 08:33:12 +03:00
if ( ! crypto_simd_usable ( ) ) {
2019-05-20 19:44:48 +03:00
struct skcipher_request * subreq = skcipher_request_ctx ( req ) ;
* subreq = * req ;
skcipher_request_set_tfm ( subreq , ctx - > fallback ) ;
return enc ? crypto_skcipher_encrypt ( subreq ) :
crypto_skcipher_decrypt ( subreq ) ;
2015-06-15 11:55:46 +03:00
}
2015-02-06 19:57:53 +03:00
2019-05-20 19:44:48 +03:00
ret = skcipher_walk_virt ( & walk , req , false ) ;
while ( ( nbytes = walk . nbytes ) ! = 0 ) {
preempt_disable ( ) ;
pagefault_disable ( ) ;
enable_kernel_vsx ( ) ;
aes_p8_cbc_encrypt ( walk . src . virt . addr ,
walk . dst . virt . addr ,
round_down ( nbytes , AES_BLOCK_SIZE ) ,
enc ? & ctx - > enc_key : & ctx - > dec_key ,
walk . iv , enc ) ;
disable_kernel_vsx ( ) ;
pagefault_enable ( ) ;
preempt_enable ( ) ;
ret = skcipher_walk_done ( & walk , nbytes % AES_BLOCK_SIZE ) ;
}
2015-06-15 11:55:46 +03:00
return ret ;
2015-02-06 19:57:53 +03:00
}
2019-05-20 19:44:48 +03:00
/* skcipher .encrypt entry point: run the common path in encrypt mode. */
static int p8_aes_cbc_encrypt(struct skcipher_request *req)
{
	return p8_aes_cbc_crypt(req, 1);
}
2019-05-20 19:44:48 +03:00
/* skcipher .decrypt entry point: run the common path in decrypt mode. */
static int p8_aes_cbc_decrypt(struct skcipher_request *req)
{
	return p8_aes_cbc_crypt(req, 0);
}
2015-02-06 19:57:53 +03:00
2019-05-20 19:44:48 +03:00
/*
 * Algorithm descriptor registered by the vmx_crypto module. Priority 2000
 * puts it ahead of the generic implementation; NEED_FALLBACK advertises
 * that a software cbc(aes) is required for non-SIMD contexts.
 */
struct skcipher_alg p8_aes_cbc_alg = {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "p8_aes_cbc",
	.base.cra_priority	= 2000,
	.base.cra_flags		= CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct p8_aes_cbc_ctx),
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.init			= p8_aes_cbc_init,
	.exit			= p8_aes_cbc_exit,
	.setkey			= p8_aes_cbc_setkey,
	.encrypt		= p8_aes_cbc_encrypt,
	.decrypt		= p8_aes_cbc_decrypt,
};