/**
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */
# include <linux/types.h>
# include <linux/err.h>
# include <linux/crypto.h>
# include <linux/delay.h>
# include <linux/hardirq.h>
# include <asm/switch_to.h>
# include <crypto/aes.h>
# include <crypto/scatterwalk.h>
2017-10-16 20:54:19 -02:00
# include <crypto/skcipher.h>
2015-02-06 14:58:31 -02:00
# include "aesp8-ppc.h"
struct p8_aes_ctr_ctx {
2017-10-16 20:54:19 -02:00
struct crypto_skcipher * fallback ;
2015-06-15 16:55:46 +08:00
struct aes_key enc_key ;
2015-02-06 14:58:31 -02:00
} ;
static int p8_aes_ctr_init ( struct crypto_tfm * tfm )
{
2017-06-16 11:39:48 +03:00
const char * alg = crypto_tfm_alg_name ( tfm ) ;
2017-10-16 20:54:19 -02:00
struct crypto_skcipher * fallback ;
2015-06-15 16:55:46 +08:00
struct p8_aes_ctr_ctx * ctx = crypto_tfm_ctx ( tfm ) ;
2017-10-16 20:54:19 -02:00
fallback = crypto_alloc_skcipher ( alg , 0 ,
CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK ) ;
2015-06-15 16:55:46 +08:00
if ( IS_ERR ( fallback ) ) {
printk ( KERN_ERR
" Failed to allocate transformation for '%s': %ld \n " ,
alg , PTR_ERR ( fallback ) ) ;
return PTR_ERR ( fallback ) ;
}
printk ( KERN_INFO " Using '%s' as fallback implementation. \n " ,
2017-10-16 20:54:19 -02:00
crypto_skcipher_driver_name ( fallback ) ) ;
2015-06-15 16:55:46 +08:00
2017-10-16 20:54:19 -02:00
crypto_skcipher_set_flags (
2015-06-15 16:55:46 +08:00
fallback ,
2017-10-16 20:54:19 -02:00
crypto_skcipher_get_flags ( ( struct crypto_skcipher * ) tfm ) ) ;
2015-06-15 16:55:46 +08:00
ctx - > fallback = fallback ;
return 0 ;
2015-02-06 14:58:31 -02:00
}
static void p8_aes_ctr_exit ( struct crypto_tfm * tfm )
{
2015-06-15 16:55:46 +08:00
struct p8_aes_ctr_ctx * ctx = crypto_tfm_ctx ( tfm ) ;
2015-02-06 14:58:31 -02:00
2015-06-15 16:55:46 +08:00
if ( ctx - > fallback ) {
2017-10-16 20:54:19 -02:00
crypto_free_skcipher ( ctx - > fallback ) ;
2015-06-15 16:55:46 +08:00
ctx - > fallback = NULL ;
}
2015-02-06 14:58:31 -02:00
}
static int p8_aes_ctr_setkey ( struct crypto_tfm * tfm , const u8 * key ,
2015-06-15 16:55:46 +08:00
unsigned int keylen )
2015-02-06 14:58:31 -02:00
{
2015-06-15 16:55:46 +08:00
int ret ;
struct p8_aes_ctr_ctx * ctx = crypto_tfm_ctx ( tfm ) ;
2015-02-06 14:58:31 -02:00
2017-01-20 16:35:33 +08:00
preempt_disable ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_disable ( ) ;
2015-07-13 13:51:39 -03:00
enable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
ret = aes_p8_set_encrypt_key ( key , keylen * 8 , & ctx - > enc_key ) ;
2015-10-29 11:44:05 +11:00
disable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_enable ( ) ;
2017-01-20 16:35:33 +08:00
preempt_enable ( ) ;
2015-02-06 14:58:31 -02:00
2017-10-16 20:54:19 -02:00
ret + = crypto_skcipher_setkey ( ctx - > fallback , key , keylen ) ;
2015-06-15 16:55:46 +08:00
return ret ;
2015-02-06 14:58:31 -02:00
}
static void p8_aes_ctr_final ( struct p8_aes_ctr_ctx * ctx ,
2015-06-15 16:55:46 +08:00
struct blkcipher_walk * walk )
2015-02-06 14:58:31 -02:00
{
2015-06-15 16:55:46 +08:00
u8 * ctrblk = walk - > iv ;
u8 keystream [ AES_BLOCK_SIZE ] ;
u8 * src = walk - > src . virt . addr ;
u8 * dst = walk - > dst . virt . addr ;
unsigned int nbytes = walk - > nbytes ;
2017-01-20 16:35:33 +08:00
preempt_disable ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_disable ( ) ;
2015-07-13 13:51:39 -03:00
enable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
aes_p8_encrypt ( ctrblk , keystream , & ctx - > enc_key ) ;
2015-10-29 11:44:05 +11:00
disable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_enable ( ) ;
2017-01-20 16:35:33 +08:00
preempt_enable ( ) ;
2015-06-15 16:55:46 +08:00
crypto: algapi - make crypto_xor() take separate dst and src arguments
There are quite a number of occurrences in the kernel of the pattern
if (dst != src)
memcpy(dst, src, walk.total % AES_BLOCK_SIZE);
crypto_xor(dst, final, walk.total % AES_BLOCK_SIZE);
or
crypto_xor(keystream, src, nbytes);
memcpy(dst, keystream, nbytes);
where crypto_xor() is preceded or followed by a memcpy() invocation
that is only there because crypto_xor() uses its output parameter as
one of the inputs. To avoid having to add new instances of this pattern
in the arm64 code, which will be refactored to implement non-SIMD
fallbacks, add an alternative implementation called crypto_xor_cpy(),
taking separate input and output arguments. This removes the need for
the separate memcpy().
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2017-07-24 11:28:04 +01:00
crypto_xor_cpy ( dst , keystream , src , nbytes ) ;
2015-06-15 16:55:46 +08:00
crypto_inc ( ctrblk , AES_BLOCK_SIZE ) ;
2015-02-06 14:58:31 -02:00
}
static int p8_aes_ctr_crypt ( struct blkcipher_desc * desc ,
2015-06-15 16:55:46 +08:00
struct scatterlist * dst ,
struct scatterlist * src , unsigned int nbytes )
2015-02-06 14:58:31 -02:00
{
2015-06-15 16:55:46 +08:00
int ret ;
2015-08-14 10:12:22 -03:00
u64 inc ;
2015-06-15 16:55:46 +08:00
struct blkcipher_walk walk ;
struct p8_aes_ctr_ctx * ctx =
crypto_tfm_ctx ( crypto_blkcipher_tfm ( desc - > tfm ) ) ;
if ( in_interrupt ( ) ) {
2017-10-16 20:54:19 -02:00
SKCIPHER_REQUEST_ON_STACK ( req , ctx - > fallback ) ;
skcipher_request_set_tfm ( req , ctx - > fallback ) ;
skcipher_request_set_callback ( req , desc - > flags , NULL , NULL ) ;
skcipher_request_set_crypt ( req , src , dst , nbytes , desc - > info ) ;
ret = crypto_skcipher_encrypt ( req ) ;
skcipher_request_zero ( req ) ;
2015-06-15 16:55:46 +08:00
} else {
blkcipher_walk_init ( & walk , dst , src , nbytes ) ;
ret = blkcipher_walk_virt_block ( desc , & walk , AES_BLOCK_SIZE ) ;
while ( ( nbytes = walk . nbytes ) > = AES_BLOCK_SIZE ) {
2017-01-20 16:35:33 +08:00
preempt_disable ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_disable ( ) ;
2015-07-13 13:51:39 -03:00
enable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
aes_p8_ctr32_encrypt_blocks ( walk . src . virt . addr ,
walk . dst . virt . addr ,
( nbytes &
AES_BLOCK_MASK ) /
AES_BLOCK_SIZE ,
& ctx - > enc_key ,
walk . iv ) ;
2015-10-29 11:44:05 +11:00
disable_kernel_vsx ( ) ;
2015-06-15 16:55:46 +08:00
pagefault_enable ( ) ;
2017-01-20 16:35:33 +08:00
preempt_enable ( ) ;
2015-06-15 16:55:46 +08:00
2015-08-14 10:12:22 -03:00
/* We need to update IV mostly for last bytes/round */
inc = ( nbytes & AES_BLOCK_MASK ) / AES_BLOCK_SIZE ;
if ( inc > 0 )
while ( inc - - )
crypto_inc ( walk . iv , AES_BLOCK_SIZE ) ;
2015-06-15 16:55:46 +08:00
nbytes & = AES_BLOCK_SIZE - 1 ;
ret = blkcipher_walk_done ( desc , & walk , nbytes ) ;
}
if ( walk . nbytes ) {
p8_aes_ctr_final ( ctx , & walk ) ;
ret = blkcipher_walk_done ( desc , & walk , 0 ) ;
}
}
return ret ;
2015-02-06 14:58:31 -02:00
}
struct crypto_alg p8_aes_ctr_alg = {
2015-06-15 16:55:46 +08:00
. cra_name = " ctr(aes) " ,
. cra_driver_name = " p8_aes_ctr " ,
. cra_module = THIS_MODULE ,
2016-06-10 16:47:03 +10:00
. cra_priority = 2000 ,
2015-06-15 16:55:46 +08:00
. cra_type = & crypto_blkcipher_type ,
. cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK ,
. cra_alignmask = 0 ,
. cra_blocksize = 1 ,
. cra_ctxsize = sizeof ( struct p8_aes_ctr_ctx ) ,
. cra_init = p8_aes_ctr_init ,
. cra_exit = p8_aes_ctr_exit ,
. cra_blkcipher = {
2015-11-30 16:19:03 -02:00
. ivsize = AES_BLOCK_SIZE ,
2015-06-15 16:55:46 +08:00
. min_keysize = AES_MIN_KEY_SIZE ,
. max_keysize = AES_MAX_KEY_SIZE ,
. setkey = p8_aes_ctr_setkey ,
. encrypt = p8_aes_ctr_crypt ,
. decrypt = p8_aes_ctr_crypt ,
} ,
2015-02-06 14:58:31 -02:00
} ;