// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
# include <asm/neon.h>
2017-07-24 13:28:09 +03:00
# include <asm/simd.h>
2014-03-20 18:35:40 +04:00
# include <asm/unaligned.h>
# include <crypto/internal/hash.h>
2019-03-13 08:12:50 +03:00
# include <crypto/internal/simd.h>
2014-03-20 18:35:40 +04:00
# include <crypto/sha.h>
2015-04-09 13:55:45 +03:00
# include <crypto/sha256_base.h>
2014-03-20 18:35:40 +04:00
# include <linux/cpufeature.h>
# include <linux/crypto.h>
# include <linux/module.h>
/* Module metadata reported by modinfo; GPL v2 matches the SPDX tag above. */
MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
2015-04-09 13:55:45 +03:00
/*
 * Per-request state for the CE-accelerated transform.
 *
 * NOTE(review): the layout is shared with the assembly implementation —
 * sha256_ce_offsetof_count/sha256_ce_offsetof_finalize below export field
 * offsets to the asm code, so fields must not be reordered or resized.
 */
struct sha256_ce_state {
	struct sha256_state	sst;		/* generic SHA-256 state (must stay first: container_of below) */
	u32			finalize;	/* nonzero: asm routine performs final padding itself */
};
2014-03-20 18:35:40 +04:00
2015-04-09 13:55:45 +03:00
/* SHA-256 block transform using the ARMv8 Crypto Extensions (assembly). */
asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				  int blocks);
2014-03-20 18:35:40 +04:00
2019-11-28 02:55:03 +03:00
/*
 * Adapter matching the sha256_base_do_*() callback signature: recover the
 * enclosing CE state from the embedded generic state and invoke the asm
 * transform.
 */
static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	struct sha256_ce_state *ctx;

	ctx = container_of(sst, struct sha256_ce_state, sst);
	sha2_ce_transform(ctx, src, blocks);
}
2017-04-26 19:11:32 +03:00
/*
 * Field offsets exported for the assembly code, which reads the message
 * count and the finalize flag directly from struct sha256_ce_state.
 */
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);
2017-07-24 13:28:09 +03:00
/* Scalar (non-NEON) SHA-256 block transform; safe without kernel_neon_begin(). */
asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);
2019-11-28 02:55:03 +03:00
/*
 * Adapter matching the sha256_base_do_*() callback signature for the
 * scalar fallback transform.
 */
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	u32 *digest = sst->state;

	sha256_block_data_order(digest, src, blocks);
}
2015-04-09 13:55:45 +03:00
static int sha256_ce_update ( struct shash_desc * desc , const u8 * data ,
unsigned int len )
2014-03-20 18:35:40 +04:00
{
2015-04-09 13:55:45 +03:00
struct sha256_ce_state * sctx = shash_desc_ctx ( desc ) ;
2014-03-20 18:35:40 +04:00
2019-03-13 08:12:50 +03:00
if ( ! crypto_simd_usable ( ) )
2017-07-24 13:28:09 +03:00
return sha256_base_do_update ( desc , data , len ,
2019-11-28 02:55:03 +03:00
__sha256_block_data_order ) ;
2017-07-24 13:28:09 +03:00
2015-04-09 13:55:45 +03:00
sctx - > finalize = 0 ;
2017-07-24 13:28:09 +03:00
kernel_neon_begin ( ) ;
2019-11-28 02:55:03 +03:00
sha256_base_do_update ( desc , data , len , __sha2_ce_transform ) ;
2015-04-09 13:55:45 +03:00
kernel_neon_end ( ) ;
2014-03-20 18:35:40 +04:00
return 0 ;
}
2015-04-09 13:55:45 +03:00
/*
 * .finup hook: absorb the final @len bytes and write the digest to @out.
 *
 * When SIMD is usable and the input lands exactly on a block boundary with
 * no buffered partial block, the asm routine is told to perform the
 * finalization (padding) itself, saving a separate finalize pass.
 */
static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *ctx = shash_desc_ctx(desc);
	bool finalize;

	if (!crypto_simd_usable()) {
		/* Scalar fallback: ordinary update + finalize sequence. */
		if (len)
			sha256_base_do_update(desc, data, len,
					      __sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 * (Must be evaluated before the update below advances sst.count.)
	 */
	finalize = len && !ctx->sst.count && !(len % SHA256_BLOCK_SIZE);
	ctx->finalize = finalize;

	kernel_neon_begin();
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, __sha2_ce_transform);
	kernel_neon_end();
	return sha256_base_finish(desc, out);
}
2015-04-09 13:55:45 +03:00
/*
 * .final hook: no more input; pad, finalize, and write the digest to @out.
 */
static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *ctx = shash_desc_ctx(desc);

	if (crypto_simd_usable()) {
		/* Padding is done by the generic helper, not the asm code. */
		ctx->finalize = 0;

		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha2_ce_transform);
		kernel_neon_end();
	} else {
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	}
	return sha256_base_finish(desc, out);
}
/*
 * Algorithm descriptors: SHA-224 and SHA-256 share every hook and differ
 * only in the init routine and digest size. Priority 200 ranks these above
 * the generic C implementations.
 */
static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.descsize		= sizeof(struct sha256_ce_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.descsize		= sizeof(struct sha256_ce_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };
/* Module init: register both algorithm descriptors with the crypto API. */
static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}
/* Module exit: unregister everything registered in sha2_ce_mod_init(). */
static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
module_cpu_feature_match ( SHA2 , sha2_ce_mod_init ) ;
module_exit ( sha2_ce_mod_fini ) ;