// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for assembler optimized version of 3 DES
 *
 * Copyright © 2014 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c, ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */
#include <crypto/algapi.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
/*
 * Per-tfm context: separate expanded key schedules for the encryption and
 * decryption directions, in the layout expected by the assembler routines.
 */
struct des3_ede_x86_ctx {
	u32 enc_expkey[DES3_EDE_EXPKEY_WORDS];	/* encryption key schedule */
	u32 dec_expkey[DES3_EDE_EXPKEY_WORDS];	/* decryption key schedule */
};
/* regular block cipher function: processes one block with @expkey */
asmlinkage void des3_ede_x86_64_crypt_blk(const u32 *expkey, u8 *dst,
					  const u8 *src);

/* 3-way parallel cipher function: processes three blocks at once */
asmlinkage void des3_ede_x86_64_crypt_blk_3way(const u32 *expkey, u8 *dst,
					       const u8 *src);
/* Encrypt a single block using the context's encryption schedule. */
static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
				    const u8 *src)
{
	des3_ede_x86_64_crypt_blk(ctx->enc_expkey, dst, src);
}
/* Decrypt a single block using the context's decryption schedule. */
static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
				    const u8 *src)
{
	des3_ede_x86_64_crypt_blk(ctx->dec_expkey, dst, src);
}
/* Encrypt three consecutive blocks using the 3-way parallel routine. */
static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	des3_ede_x86_64_crypt_blk_3way(ctx->enc_expkey, dst, src);
}
/* Decrypt three consecutive blocks using the 3-way parallel routine. */
static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	des3_ede_x86_64_crypt_blk_3way(ctx->dec_expkey, dst, src);
}
/* cia_encrypt hook for the plain "des3_ede" cipher. */
static void des3_ede_x86_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);

	des3_ede_enc_blk(ctx, dst, src);
}
/* cia_decrypt hook for the plain "des3_ede" cipher. */
static void des3_ede_x86_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);

	des3_ede_dec_blk(ctx, dst, src);
}
2018-02-20 10:48:17 +03:00
static int ecb_crypt ( struct skcipher_request * req , const u32 * expkey )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
const unsigned int bsize = DES3_EDE_BLOCK_SIZE ;
struct skcipher_walk walk ;
2014-06-09 21:59:54 +04:00
unsigned int nbytes ;
int err ;
2018-02-20 10:48:17 +03:00
err = skcipher_walk_virt ( & walk , req , false ) ;
2014-06-09 21:59:54 +04:00
2018-02-20 10:48:17 +03:00
while ( ( nbytes = walk . nbytes ) ) {
u8 * wsrc = walk . src . virt . addr ;
u8 * wdst = walk . dst . virt . addr ;
2014-06-09 21:59:54 +04:00
/* Process four block batch */
if ( nbytes > = bsize * 3 ) {
do {
des3_ede_x86_64_crypt_blk_3way ( expkey , wdst ,
wsrc ) ;
wsrc + = bsize * 3 ;
wdst + = bsize * 3 ;
nbytes - = bsize * 3 ;
} while ( nbytes > = bsize * 3 ) ;
if ( nbytes < bsize )
goto done ;
}
/* Handle leftovers */
do {
des3_ede_x86_64_crypt_blk ( expkey , wdst , wsrc ) ;
wsrc + = bsize ;
wdst + = bsize ;
nbytes - = bsize ;
} while ( nbytes > = bsize ) ;
done :
2018-02-20 10:48:17 +03:00
err = skcipher_walk_done ( & walk , nbytes ) ;
2014-06-09 21:59:54 +04:00
}
return err ;
}
2018-02-20 10:48:17 +03:00
static int ecb_encrypt ( struct skcipher_request * req )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
struct des3_ede_x86_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
2014-06-09 21:59:54 +04:00
2018-02-20 10:48:17 +03:00
return ecb_crypt ( req , ctx - > enc_expkey ) ;
2014-06-09 21:59:54 +04:00
}
2018-02-20 10:48:17 +03:00
static int ecb_decrypt ( struct skcipher_request * req )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
struct des3_ede_x86_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
2014-06-09 21:59:54 +04:00
2018-02-20 10:48:17 +03:00
return ecb_crypt ( req , ctx - > dec_expkey ) ;
2014-06-09 21:59:54 +04:00
}
2018-02-20 10:48:17 +03:00
static unsigned int __cbc_encrypt ( struct des3_ede_x86_ctx * ctx ,
struct skcipher_walk * walk )
2014-06-09 21:59:54 +04:00
{
unsigned int bsize = DES3_EDE_BLOCK_SIZE ;
unsigned int nbytes = walk - > nbytes ;
u64 * src = ( u64 * ) walk - > src . virt . addr ;
u64 * dst = ( u64 * ) walk - > dst . virt . addr ;
u64 * iv = ( u64 * ) walk - > iv ;
do {
* dst = * src ^ * iv ;
des3_ede_enc_blk ( ctx , ( u8 * ) dst , ( u8 * ) dst ) ;
iv = dst ;
src + = 1 ;
dst + = 1 ;
nbytes - = bsize ;
} while ( nbytes > = bsize ) ;
* ( u64 * ) walk - > iv = * iv ;
return nbytes ;
}
2018-02-20 10:48:17 +03:00
static int cbc_encrypt ( struct skcipher_request * req )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
struct des3_ede_x86_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
struct skcipher_walk walk ;
unsigned int nbytes ;
2014-06-09 21:59:54 +04:00
int err ;
2018-02-20 10:48:17 +03:00
err = skcipher_walk_virt ( & walk , req , false ) ;
2014-06-09 21:59:54 +04:00
while ( ( nbytes = walk . nbytes ) ) {
2018-02-20 10:48:17 +03:00
nbytes = __cbc_encrypt ( ctx , & walk ) ;
err = skcipher_walk_done ( & walk , nbytes ) ;
2014-06-09 21:59:54 +04:00
}
return err ;
}
2018-02-20 10:48:17 +03:00
/*
 * CBC-decrypt the current walk chunk.  The chunk is processed BACKWARDS,
 * from the last block to the first, so that in-place operation is safe:
 * each ciphertext block is still intact when it is needed as the chaining
 * value for the block before it.  The last ciphertext block is saved up
 * front and becomes the IV for the next chunk.  Returns the bytes left
 * unprocessed (< block size).
 */
static unsigned int __cbc_decrypt(struct des3_ede_x86_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[3 - 1];		/* chaining values saved across a 3-way batch */
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			nbytes -= bsize * 3 - bsize;
			src -= 3 - 1;
			dst -= 3 - 1;

			/* Save ciphertexts that the 3-way call may overwrite. */
			ivs[0] = src[0];
			ivs[1] = src[1];

			des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 3);
	}

	/* Handle leftovers */
	for (;;) {
		des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	/* First block chains against the incoming IV; then advance the IV. */
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}
static int cbc_decrypt ( struct skcipher_request * req )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
struct des3_ede_x86_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
struct skcipher_walk walk ;
unsigned int nbytes ;
2014-06-09 21:59:54 +04:00
int err ;
2018-02-20 10:48:17 +03:00
err = skcipher_walk_virt ( & walk , req , false ) ;
2014-06-09 21:59:54 +04:00
while ( ( nbytes = walk . nbytes ) ) {
2018-02-20 10:48:17 +03:00
nbytes = __cbc_decrypt ( ctx , & walk ) ;
err = skcipher_walk_done ( & walk , nbytes ) ;
2014-06-09 21:59:54 +04:00
}
return err ;
}
static void ctr_crypt_final ( struct des3_ede_x86_ctx * ctx ,
2018-02-20 10:48:17 +03:00
struct skcipher_walk * walk )
2014-06-09 21:59:54 +04:00
{
u8 * ctrblk = walk - > iv ;
u8 keystream [ DES3_EDE_BLOCK_SIZE ] ;
u8 * src = walk - > src . virt . addr ;
u8 * dst = walk - > dst . virt . addr ;
unsigned int nbytes = walk - > nbytes ;
des3_ede_enc_blk ( ctx , keystream , ctrblk ) ;
crypto: algapi - make crypto_xor() take separate dst and src arguments
There are quite a number of occurrences in the kernel of the pattern
if (dst != src)
memcpy(dst, src, walk.total % AES_BLOCK_SIZE);
crypto_xor(dst, final, walk.total % AES_BLOCK_SIZE);
or
crypto_xor(keystream, src, nbytes);
memcpy(dst, keystream, nbytes);
where crypto_xor() is preceded or followed by a memcpy() invocation
that is only there because crypto_xor() uses its output parameter as
one of the inputs. To avoid having to add new instances of this pattern
in the arm64 code, which will be refactored to implement non-SIMD
fallbacks, add an alternative implementation called crypto_xor_cpy(),
taking separate input and output arguments. This removes the need for
the separate memcpy().
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2017-07-24 13:28:04 +03:00
crypto_xor_cpy ( dst , keystream , src , nbytes ) ;
2014-06-09 21:59:54 +04:00
crypto_inc ( ctrblk , DES3_EDE_BLOCK_SIZE ) ;
}
2018-02-20 10:48:17 +03:00
/*
 * CTR-process the full blocks of the current walk chunk.  The 64-bit
 * big-endian counter is read from walk->iv, incremented once per block,
 * and written back at the end.  Batches of three blocks go through the
 * 3-way parallel routine.  Returns the bytes left unprocessed (< block
 * size); a trailing partial block is handled by ctr_crypt_final().
 */
static unsigned int __ctr_crypt(struct des3_ede_x86_ctx *ctx,
				struct skcipher_walk *walk)
{
	unsigned int bsize = DES3_EDE_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	__be64 *src = (__be64 *)walk->src.virt.addr;
	__be64 *dst = (__be64 *)walk->dst.virt.addr;
	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
	__be64 ctrblocks[3];

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			/* create ctrblks for parallel encrypt */
			ctrblocks[0] = cpu_to_be64(ctrblk++);
			ctrblocks[1] = cpu_to_be64(ctrblk++);
			ctrblocks[2] = cpu_to_be64(ctrblk++);

			des3_ede_enc_blk_3way(ctx, (u8 *)ctrblocks,
					      (u8 *)ctrblocks);

			dst[0] = src[0] ^ ctrblocks[0];
			dst[1] = src[1] ^ ctrblocks[1];
			dst[2] = src[2] ^ ctrblocks[2];

			src += 3;
			dst += 3;
		} while ((nbytes -= bsize * 3) >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		ctrblocks[0] = cpu_to_be64(ctrblk++);

		des3_ede_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);

		dst[0] = src[0] ^ ctrblocks[0];

		src += 1;
		dst += 1;
	} while ((nbytes -= bsize) >= bsize);

done:
	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
	return nbytes;
}
static int ctr_crypt ( struct skcipher_request * req )
2014-06-09 21:59:54 +04:00
{
2018-02-20 10:48:17 +03:00
struct crypto_skcipher * tfm = crypto_skcipher_reqtfm ( req ) ;
struct des3_ede_x86_ctx * ctx = crypto_skcipher_ctx ( tfm ) ;
struct skcipher_walk walk ;
unsigned int nbytes ;
2014-06-09 21:59:54 +04:00
int err ;
2018-02-20 10:48:17 +03:00
err = skcipher_walk_virt ( & walk , req , false ) ;
2014-06-09 21:59:54 +04:00
while ( ( nbytes = walk . nbytes ) > = DES3_EDE_BLOCK_SIZE ) {
2018-02-20 10:48:17 +03:00
nbytes = __ctr_crypt ( ctx , & walk ) ;
err = skcipher_walk_done ( & walk , nbytes ) ;
2014-06-09 21:59:54 +04:00
}
2018-02-20 10:48:17 +03:00
if ( nbytes ) {
ctr_crypt_final ( ctx , & walk ) ;
err = skcipher_walk_done ( & walk , 0 ) ;
2014-06-09 21:59:54 +04:00
}
return err ;
}
/*
 * Expand @key into the encryption and decryption schedules used by the
 * assembler routines.
 */
static int des3_ede_x86_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 i, j, tmp;
	int err;

	/* Key validity check (weak-key policy) — see crypto/internal/des.h. */
	err = crypto_des3_ede_verify_key(tfm, key);
	if (err)
		return err;

	/* Generate encryption context using generic implementation. */
	err = __des3_ede_setkey(ctx->enc_expkey, &tfm->crt_flags, key, keylen);
	if (err < 0)
		return err;

	/* Fix encryption context for this implementation and form decryption
	 * context: rotate the odd words by 4 (presumably the layout the
	 * assembler expects — see the .S file) and build the decryption
	 * schedule as the encryption schedule in reverse round order. */
	j = DES3_EDE_EXPKEY_WORDS - 2;
	for (i = 0; i < DES3_EDE_EXPKEY_WORDS; i += 2, j -= 2) {
		tmp = ror32(ctx->enc_expkey[i + 1], 4);
		ctx->enc_expkey[i + 1] = tmp;

		ctx->dec_expkey[j + 0] = ctx->enc_expkey[i + 0];
		ctx->dec_expkey[j + 1] = tmp;
	}

	return 0;
}
static int des3_ede_x86_setkey_skcipher ( struct crypto_skcipher * tfm ,
const u8 * key ,
unsigned int keylen )
{
return des3_ede_x86_setkey ( & tfm - > base , key , keylen ) ;
}
/* Plain single-block cipher ("des3_ede") backed by the assembler routine. */
static struct crypto_alg des3_ede_cipher = {
	.cra_name		= "des3_ede",
	.cra_driver_name	= "des3_ede-asm",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= DES3_EDE_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct des3_ede_x86_ctx),
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= DES3_EDE_KEY_SIZE,
			.cia_max_keysize	= DES3_EDE_KEY_SIZE,
			.cia_setkey		= des3_ede_x86_setkey,
			.cia_encrypt		= des3_ede_x86_encrypt,
			.cia_decrypt		= des3_ede_x86_decrypt,
		}
	}
};
/* ECB, CBC and CTR skcipher modes built on the assembler 3DES core. */
static struct skcipher_alg des3_ede_skciphers[] = {
	{
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "ecb-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "cbc-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		/* CTR is a stream mode: blocksize 1, chunksize = cipher block. */
		.base.cra_name		= "ctr(des3_ede)",
		.base.cra_driver_name	= "ctr-des3_ede-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct des3_ede_x86_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES3_EDE_BLOCK_SIZE,
		.chunksize		= DES3_EDE_BLOCK_SIZE,
		.setkey			= des3_ede_x86_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}
};
static bool is_blacklisted_cpu ( void )
{
if ( boot_cpu_data . x86_vendor ! = X86_VENDOR_INTEL )
return false ;
if ( boot_cpu_data . x86 = = 0x0f ) {
/*
* On Pentium 4 , des3_ede - x86_64 is slower than generic C
* implementation because use of 64 bit rotates ( which are really
* slow on P4 ) . Therefore blacklist P4s .
*/
return true ;
}
return false ;
}
/* Set force=1 to load the module even on blacklisted (slow) CPUs. */
static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
/*
 * Module init: refuse to load on blacklisted CPUs (unless force=1), then
 * register the single-block cipher and the three skcipher modes.
 */
static int __init des3_ede_x86_init(void)
{
	int err;

	if (!force && is_blacklisted_cpu()) {
		pr_info("des3_ede-x86_64: performance on this CPU would be suboptimal: disabling des3_ede-x86_64.\n");
		return -ENODEV;
	}

	err = crypto_register_alg(&des3_ede_cipher);
	if (err)
		return err;

	err = crypto_register_skciphers(des3_ede_skciphers,
					ARRAY_SIZE(des3_ede_skciphers));
	if (err)
		goto out_unregister_alg;

	return 0;

out_unregister_alg:
	crypto_unregister_alg(&des3_ede_cipher);
	return err;
}
/* Module exit: drop both registrations made in des3_ede_x86_init(). */
static void __exit des3_ede_x86_fini(void)
{
	crypto_unregister_alg(&des3_ede_cipher);
	crypto_unregister_skciphers(des3_ede_skciphers,
				    ARRAY_SIZE(des3_ede_skciphers));
}
module_init(des3_ede_x86_init);
module_exit(des3_ede_x86_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Triple DES EDE Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("des3_ede");
MODULE_ALIAS_CRYPTO("des3_ede-asm");
MODULE_AUTHOR("Jussi Kivilinna <jussi.kivilinna@iki.fi>");