/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key => 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
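
/*
 * For example, a 16-byte (128-bit) key has key_length / 4 == 4, giving
 * 6 + 4 == 10 rounds; 24- and 32-byte keys likewise yield the 12 and 14
 * rounds listed in the table above.
 */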

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
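
/*
 * CCM allows even tag sizes from 4 to 16 bytes. Only the lower bound and
 * evenness are checked below; the upper bound is enforced by the generic
 * AEAD layer against the .maxauthsize (AES_BLOCK_SIZE) declared further
 * down.
 */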

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
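
/*
 * Worked example (illustrative only): with req->iv[0] == 2, i.e. L == 3
 * and a 12-byte nonce, and msglen == 0x1000, ccm_init_mac() produces
 *
 *	maciv = flags || nonce[0..11] || 0x00 0x10 0x00
 *
 * where the flags byte encodes L - 1 in bits 0..2, the tag size in bits
 * 3..5 and AAD presence in bit 6, while req->iv has its trailing L bytes
 * zeroed to become the initial counter block.
 */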

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp, bool use_neon)
{
	if (likely(use_neon)) {
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes > AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		} else {
			*macp = 0;
		}
	}
}
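
/*
 * The scalar path above implements the same CBC-MAC as the NEON helper:
 * bytes are XORed into mac[] at offset *macp until a full block has
 * accumulated, and mac[] is passed through the block cipher before each
 * new block is started, with *macp tracking the number of buffered bytes
 * between calls.
 */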

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
				   bool use_neon)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}
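
/*
 * Length tag examples per RFC 3610: an assoclen of 24 is prepended as the
 * two bytes 0x00 0x18, whereas an assoclen of 0x10000 (>= 0xff00) is
 * prepended as the marker 0xff 0xfe followed by the 32-bit big-endian
 * length 0x00 0x01 0x00 0x00.
 */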

static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);

			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}
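
/*
 * Note on the fallback above: each iteration encrypts the incremented
 * counter to derive one keystream block, folds the plaintext (src when
 * encrypting, dst when decrypting) into the CBC-MAC, and XORs the
 * keystream into the data. Once the walk completes, the running MAC is
 * encrypted once more and XORed with E(K, iv0) to produce the tag,
 * mirroring what ce_aes_ccm_final() does on the NEON path.
 */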

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
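
/*
 * Minimal usage sketch (illustrative only, not part of this driver): a
 * caller reaches these routines through the generic AEAD API, roughly
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 8);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	crypto_aead_encrypt(req);
 *
 * with iv[0] set to L - 1 and the nonce in iv[1..15 - L], as required by
 * ccm_init_mac(). Error handling is omitted here.
 */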

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}
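
/*
 * Note that the tag comparison in ccm_decrypt() uses crypto_memneq()
 * rather than memcmp(): it runs in constant time, so a forger cannot
 * learn how many leading tag bytes matched from timing differences.
 */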

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");