2019-06-04 11:11:33 +03:00
// SPDX-License-Identifier: GPL-2.0-only
2014-03-06 12:23:33 +04:00
/*
 * sha1-ce-glue.c - SHA-1 secure hash using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
# include <asm/neon.h>
2017-07-24 13:28:08 +03:00
# include <asm/simd.h>
2014-03-06 12:23:33 +04:00
# include <asm/unaligned.h>
# include <crypto/internal/hash.h>
2019-03-13 08:12:50 +03:00
# include <crypto/internal/simd.h>
2020-11-13 08:20:21 +03:00
# include <crypto/sha1.h>
2015-04-09 13:55:44 +03:00
# include <crypto/sha1_base.h>
2014-03-06 12:23:33 +04:00
# include <linux/cpufeature.h>
# include <linux/crypto.h>
# include <linux/module.h>
MODULE_DESCRIPTION("SHA1 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha1");
2014-03-06 12:23:33 +04:00
2015-04-09 13:55:44 +03:00
/*
 * Driver-private hash state: the generic SHA-1 state plus a flag telling
 * the asm routine whether it should perform the finalization itself.
 * The asm code locates both fields via the offsets exported below.
 */
struct sha1_ce_state {
	struct sha1_state	sst;		/* must come first (container_of) */
	u32			finalize;	/* nonzero: asm finalizes the digest */
};
2014-03-06 12:23:33 +04:00
2020-08-25 04:34:28 +03:00
extern const u32 sha1_ce_offsetof_count ;
extern const u32 sha1_ce_offsetof_finalize ;
2021-02-03 14:36:19 +03:00
asmlinkage int sha1_ce_transform ( struct sha1_ce_state * sst , u8 const * src ,
int blocks ) ;
2014-03-06 12:23:33 +04:00
2019-11-28 02:55:03 +03:00
/*
 * Feed @blocks blocks at @src to the CE asm routine, wrapped in a
 * kernel-mode NEON section. The asm code may return before consuming
 * everything (it reports how many blocks remain), so keep calling it
 * until no blocks are left.
 */
static void __sha1_ce_transform(struct sha1_state *sst, u8 const *src,
				int blocks)
{
	while (blocks) {
		int remaining;

		kernel_neon_begin();
		remaining = sha1_ce_transform(container_of(sst,
							   struct sha1_ce_state,
							   sst),
					      src, blocks);
		kernel_neon_end();

		src += (blocks - remaining) * SHA1_BLOCK_SIZE;
		blocks = remaining;
	}
}
2017-04-26 19:11:32 +03:00
/* Exported to the asm code; must track the layout of struct sha1_ce_state. */
const u32 sha1_ce_offsetof_count = offsetof(struct sha1_ce_state, sst.count);
const u32 sha1_ce_offsetof_finalize = offsetof(struct sha1_ce_state, finalize);
2015-04-09 13:55:44 +03:00
static int sha1_ce_update ( struct shash_desc * desc , const u8 * data ,
unsigned int len )
2014-03-06 12:23:33 +04:00
{
2015-04-09 13:55:44 +03:00
struct sha1_ce_state * sctx = shash_desc_ctx ( desc ) ;
2014-03-06 12:23:33 +04:00
2019-03-13 08:12:50 +03:00
if ( ! crypto_simd_usable ( ) )
2017-07-24 13:28:08 +03:00
return crypto_sha1_update ( desc , data , len ) ;
2015-04-09 13:55:44 +03:00
sctx - > finalize = 0 ;
2019-11-28 02:55:03 +03:00
sha1_base_do_update ( desc , data , len , __sha1_ce_transform ) ;
2014-03-06 12:23:33 +04:00
return 0 ;
}
2015-04-09 13:55:44 +03:00
/* shash .finup: absorb the final @len bytes and write the digest to @out. */
static int sha1_ce_finup(struct shash_desc *desc, const u8 *data,
			 unsigned int len, u8 *out)
{
	struct sha1_ce_state *sctx = shash_desc_ctx(desc);
	bool finalize = !sctx->sst.count && !(len % SHA1_BLOCK_SIZE) && len;

	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	sha1_base_do_update(desc, data, len, __sha1_ce_transform);
	if (!finalize)
		sha1_base_do_finalize(desc, __sha1_ce_transform);

	return sha1_base_finish(desc, out);
}
2015-04-09 13:55:44 +03:00
/* shash .final: finalize the hash and write the digest to @out. */
static int sha1_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha1_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, NULL, 0, out);

	/* Finalization is done here, not in the asm code. */
	sctx->finalize = 0;
	sha1_base_do_finalize(desc, __sha1_ce_transform);

	return sha1_base_finish(desc, out);
}
2020-02-24 17:47:41 +03:00
static int sha1_ce_export ( struct shash_desc * desc , void * out )
{
struct sha1_ce_state * sctx = shash_desc_ctx ( desc ) ;
memcpy ( out , & sctx - > sst , sizeof ( struct sha1_state ) ) ;
return 0 ;
}
static int sha1_ce_import ( struct shash_desc * desc , const void * in )
{
struct sha1_ce_state * sctx = shash_desc_ctx ( desc ) ;
memcpy ( & sctx - > sst , in , sizeof ( struct sha1_state ) ) ;
sctx - > finalize = 0 ;
return 0 ;
}
2014-03-06 12:23:33 +04:00
static struct shash_alg alg = {
2015-04-09 13:55:44 +03:00
. init = sha1_base_init ,
. update = sha1_ce_update ,
. final = sha1_ce_final ,
. finup = sha1_ce_finup ,
2020-02-24 17:47:41 +03:00
. import = sha1_ce_import ,
. export = sha1_ce_export ,
2015-04-09 13:55:44 +03:00
. descsize = sizeof ( struct sha1_ce_state ) ,
2020-02-24 17:47:41 +03:00
. statesize = sizeof ( struct sha1_state ) ,
2014-03-06 12:23:33 +04:00
. digestsize = SHA1_DIGEST_SIZE ,
. base = {
. cra_name = " sha1 " ,
. cra_driver_name = " sha1-ce " ,
. cra_priority = 200 ,
. cra_blocksize = SHA1_BLOCK_SIZE ,
. cra_module = THIS_MODULE ,
}
} ;
/* Module entry point: register the shash implementation. */
static int __init sha1_ce_mod_init(void)
{
	return crypto_register_shash(&alg);
}
/* Module exit point: unregister the shash implementation. */
static void __exit sha1_ce_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}

/* Only load on CPUs advertising the SHA1 crypto extension. */
module_cpu_feature_match(SHA1, sha1_ce_mod_init);
module_exit(sha1_ce_mod_fini);