// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/types.h>
#include <linux/string.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

/* Adapt the asm routine's prototype to the block function type expected by
 * the sha256_base helpers.
 */
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}
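
/*
 * The NEON routine may only run with the SIMD unit enabled, i.e. between
 * kernel_neon_begin() and kernel_neon_end(); sha256_update_neon() and
 * sha256_finup_neon() below take care of that.
 */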

static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				     __sha256_block_data_order);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				      __sha256_block_data_order);
	sha256_base_do_finalize(desc, __sha256_block_data_order);

	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}

static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
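
/*
 * Priority 125 puts the scalar assembly ahead of the generic C "sha256"
 * implementation (priority 100) while keeping it below the NEON variants
 * registered at 150 below, so the crypto core picks the fastest usable
 * implementation.
 */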

static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	/* NEON may be unusable here, e.g. in IRQ context; use the scalar code */
	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead. E.g., with 16 bytes already
		 * buffered and len == 200, the iterations pass 48, 64, 64 and
		 * 24 bytes (the final partial block is only buffered).
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
		kernel_neon_end();

		data += chunk;
		len -= chunk;
	}
	return 0;
}

static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}

static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);
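
/*
 * A minimal sketch of how a kernel client might use these algorithms via the
 * shash API (hypothetical caller code with error handling mostly omitted;
 * the crypto core transparently selects the highest-priority "sha256"
 * provider):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 */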