// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */
# include <asm/crypto/camellia.h>
# include <asm/crypto/glue_helper.h>
2018-02-19 23:48:23 -08:00
# include <crypto/algapi.h>
# include <crypto/internal/simd.h>
# include <crypto/xts.h>
# include <linux/crypto.h>
# include <linux/err.h>
# include <linux/module.h>
# include <linux/types.h>
2012-10-26 14:49:01 +03:00
# define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
2013-04-13 13:47:00 +03:00
/* 16-way parallel cipher functions (avx/aes-ni) */
2012-10-26 14:49:01 +03:00
asmlinkage void camellia_ecb_enc_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_ecb_enc_16way ) ;
2012-10-26 14:49:01 +03:00
asmlinkage void camellia_ecb_dec_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_ecb_dec_16way ) ;
2012-10-26 14:49:01 +03:00
asmlinkage void camellia_cbc_dec_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_cbc_dec_16way ) ;
2012-10-26 14:49:01 +03:00
asmlinkage void camellia_ctr_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src , le128 * iv ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_ctr_16way ) ;
2012-10-26 14:49:01 +03:00
2013-04-08 21:51:11 +03:00
asmlinkage void camellia_xts_enc_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src , le128 * iv ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_xts_enc_16way ) ;
2013-04-08 21:51:11 +03:00
asmlinkage void camellia_xts_dec_16way ( struct camellia_ctx * ctx , u8 * dst ,
const u8 * src , le128 * iv ) ;
2013-04-13 13:47:00 +03:00
EXPORT_SYMBOL_GPL ( camellia_xts_dec_16way ) ;
2013-04-08 21:51:11 +03:00
/*
 * XTS encryption of a single 128-bit block; used by the glue layer as the
 * num_blocks == 1 fallback in the camellia_enc_xts dispatch table below.
 */
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);
/*
 * XTS decryption of a single 128-bit block; num_blocks == 1 fallback for
 * the camellia_dec_xts dispatch table below.
 */
void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);
/*
 * ECB encryption dispatch table: entries are ordered widest batch first;
 * the glue code falls back to the smaller batch sizes for tail blocks.
 * The FPU is only used for batches of fpu_blocks_limit blocks or more.
 */
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};
/* CTR dispatch table: 16-way AVX, then 2-way and 1-way scalar fallbacks. */
static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};
/* XTS encryption dispatch table: 16-way AVX plus single-block fallback. */
static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};
/* ECB decryption dispatch table, mirroring camellia_enc. */
static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};
/*
 * CBC decryption dispatch table (CBC decrypt parallelizes across blocks,
 * unlike CBC encrypt which is handled serially via camellia_enc_blk).
 */
static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};
/* XTS decryption dispatch table: 16-way AVX plus single-block fallback. */
static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
/*
 * skcipher ->setkey() for the non-XTS modes: expands the key into the
 * per-tfm camellia_ctx; key-length validation happens in __camellia_setkey().
 */
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
				 &tfm->base.crt_flags);
}
2018-02-19 23:48:23 -08:00
static int ecb_encrypt ( struct skcipher_request * req )
2012-10-26 14:49:01 +03:00
{
2018-02-19 23:48:23 -08:00
return glue_ecb_req_128bit ( & camellia_enc , req ) ;
2012-10-26 14:49:01 +03:00
}
2018-02-19 23:48:23 -08:00
static int ecb_decrypt ( struct skcipher_request * req )
2012-10-26 14:49:01 +03:00
{
2018-02-19 23:48:23 -08:00
return glue_ecb_req_128bit ( & camellia_dec , req ) ;
2012-10-26 14:49:01 +03:00
}
/*
 * skcipher ->encrypt() for CBC: chaining makes encryption inherently serial,
 * so the single-block camellia_enc_blk is used for every block.
 */
static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
					   req);
}
2018-02-19 23:48:23 -08:00
static int cbc_decrypt ( struct skcipher_request * req )
2012-10-26 14:49:01 +03:00
{
2018-02-19 23:48:23 -08:00
return glue_cbc_decrypt_req_128bit ( & camellia_dec_cbc , req ) ;
2012-10-26 14:49:01 +03:00
}
2018-02-19 23:48:23 -08:00
static int ctr_crypt ( struct skcipher_request * req )
2012-10-26 14:49:01 +03:00
{
2018-02-19 23:48:23 -08:00
return glue_ctr_req_128bit ( & camellia_ctr , req ) ;
2012-10-26 14:49:01 +03:00
}
/*
 * skcipher ->setkey() for XTS: validates the combined key (equal-length
 * halves, weak-key rules) with xts_verify_key(), then expands each half into
 * its own Camellia schedule — the first half for the data cipher, the second
 * for the tweak cipher. Returns 0 or a negative errno from either step.
 */
int xts_camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __camellia_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __camellia_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
				 flags);
}
EXPORT_SYMBOL_GPL(xts_camellia_setkey);
/* skcipher ->encrypt() for XTS. */
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* The tweak is always *encrypted*, hence camellia_enc_blk here. */
	return glue_xts_req_128bit(&camellia_enc_xts, req,
				   XTS_TWEAK_CAST(camellia_enc_blk),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
/* skcipher ->decrypt() for XTS. */
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	/*
	 * Intentionally camellia_enc_blk, not dec: in XTS the tweak is
	 * encrypted in both directions; only the data cipher is inverted
	 * (via the camellia_dec_xts table).
	 */
	return glue_xts_req_128bit(&camellia_dec_xts, req,
				   XTS_TWEAK_CAST(camellia_enc_blk),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
/*
 * Internal skcipher algorithms: the "__" driver-name prefix together with
 * CRYPTO_ALG_INTERNAL keeps them out of direct user reach; they are exposed
 * through the SIMD wrappers registered in camellia_aesni_init(), which handle
 * the may-not-use-FPU contexts.
 */
static struct skcipher_alg camellia_algs[] = {
	{
		.base.cra_name = "__ecb(camellia)",
		.base.cra_driver_name = "__ecb-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct camellia_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = CAMELLIA_MAX_KEY_SIZE,
		.setkey = camellia_setkey,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base.cra_name = "__cbc(camellia)",
		.base.cra_driver_name = "__cbc-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct camellia_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = CAMELLIA_MAX_KEY_SIZE,
		.ivsize = CAMELLIA_BLOCK_SIZE,
		.setkey = camellia_setkey,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	}, {
		/* CTR is a stream mode: blocksize 1, chunksize = real block. */
		.base.cra_name = "__ctr(camellia)",
		.base.cra_driver_name = "__ctr-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct camellia_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = CAMELLIA_MAX_KEY_SIZE,
		.ivsize = CAMELLIA_BLOCK_SIZE,
		.chunksize = CAMELLIA_BLOCK_SIZE,
		.setkey = camellia_setkey,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
	}, {
		/* XTS carries two keys, hence the doubled key sizes/ctx. */
		.base.cra_name = "__xts(camellia)",
		.base.cra_driver_name = "__xts-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct camellia_xts_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = 2 * CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = 2 * CAMELLIA_MAX_KEY_SIZE,
		.ivsize = CAMELLIA_BLOCK_SIZE,
		.setkey = xts_camellia_setkey,
		.encrypt = xts_encrypt,
		.decrypt = xts_decrypt,
	},
};

/* SIMD wrapper handles filled in by simd_register_skciphers_compat(). */
static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
/*
 * Module init: verify the CPU/OS support everything the 16-way assembler
 * implementation needs, then register the SIMD-wrapped skciphers.
 * Returns -ENODEV if any required feature is missing.
 */
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	/* The 16-way code uses both AVX and AES-NI instructions. */
	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	/* The OS must save/restore SSE and YMM state on context switch. */
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(camellia_algs,
					      ARRAY_SIZE(camellia_algs),
					      camellia_simd_algs);
}
/* Module exit: unregister the SIMD-wrapped skciphers registered at init. */
static void __exit camellia_aesni_fini(void)
{
	simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
				  camellia_simd_algs);
}
module_init ( camellia_aesni_init ) ;
module_exit ( camellia_aesni_fini ) ;
MODULE_LICENSE ( " GPL " ) ;
MODULE_DESCRIPTION ( " Camellia Cipher Algorithm, AES-NI/AVX optimized " ) ;
2014-11-20 17:05:53 -08:00
MODULE_ALIAS_CRYPTO ( " camellia " ) ;
MODULE_ALIAS_CRYPTO ( " camellia-asm " ) ;