// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-cipher.c - core AES cipher using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};
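
/*
 * Building blocks implemented in assembly using the ARMv8 Crypto Extensions
 * instructions: __aes_ce_encrypt()/__aes_ce_decrypt() process a single
 * 16-byte block with an expanded key, __aes_ce_sub() applies the AES S-box
 * (SubWord) to each byte of a 32-bit word during key expansion, and
 * __aes_ce_invert() applies the Inverse MixColumns transformation to one
 * round key.
 */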
asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

asmlinkage u32 __aes_ce_sub(u32 l);
asmlinkage void __aes_ce_invert(struct aes_block *out,
				const struct aes_block *in);

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
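
/*
 * Kernel mode NEON is not usable in every context (e.g. in hard IRQ
 * handlers), so when crypto_simd_usable() returns false the generic AES
 * library routines are used instead of the Crypto Extensions instructions.
 */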
static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
	kernel_neon_end();
}

static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
	kernel_neon_end();
}
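
/*
 * Expand the user supplied key into the full AES key schedule. The SubWord
 * and Inverse MixColumns steps are delegated to the Crypto Extensions
 * helpers above, so the NEON unit is claimed for the duration of the
 * expansion.
 */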
int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
		     unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^ rcon[i] ^ rki[0];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = __aes_ce_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		__aes_ce_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}
EXPORT_SYMBOL(ce_aes_expandkey);
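
/*
 * crypto_tfm facing wrapper around ce_aes_expandkey(): an unsupported key
 * length is reported back through the CRYPTO_TFM_RES_BAD_KEY_LEN flag.
 */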
int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		  unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
EXPORT_SYMBOL(ce_aes_setkey);

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-ce",
	.cra_priority		= 250,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_cipher = {
		.cia_min_keysize	= AES_MIN_KEY_SIZE,
		.cia_max_keysize	= AES_MAX_KEY_SIZE,
		.cia_setkey		= ce_aes_setkey,
		.cia_encrypt		= aes_cipher_encrypt,
		.cia_decrypt		= aes_cipher_decrypt
	}
};
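
/*
 * Usage sketch (not part of this driver): kernel code would normally reach
 * this implementation through the generic single block cipher API, with
 * key, dst and src being caller supplied buffers, e.g.:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		crypto_cipher_encrypt_one(tfm, dst, src);
 *		crypto_free_cipher(tfm);
 *	}
 *
 * With a cra_priority of 250, "aes-ce" is preferred over lower priority
 * "aes" implementations when the CPU advertises the AES feature.
 */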

static int __init aes_mod_init(void)
{
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_alg(&aes_alg);
}

module_cpu_feature_match(AES, aes_mod_init);
module_exit(aes_mod_exit);