// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha1.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>
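
/*
 * Shared update helper for all of the SHA-1 variants below: fall back to
 * the generic C implementation when the SIMD/FPU registers may not be
 * used (e.g. in hard interrupt context) or when the buffered plus new
 * data still do not complete a full 64-byte block.
 */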
static int sha1_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len, sha1_block_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/*
	 * Make sure struct sha1_state begins directly with the SHA1
	 * 160-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len, sha1_xform);
	kernel_fpu_end();

	return 0;
}

static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}

asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
				     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}
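
/*
 * Each variant is registered under the generic name "sha1" with an
 * increasing cra_priority (SSSE3 150, AVX 160, AVX2 170, SHA-NI 250),
 * so the crypto API picks the fastest implementation the CPU supports.
 */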

static struct shash_alg sha1_ssse3_alg = {
	.digestsize	= SHA1_DIGEST_SIZE,
	.init		= sha1_base_init,
	.update		= sha1_ssse3_update,
	.final		= sha1_ssse3_final,
	.finup		= sha1_ssse3_finup,
	.descsize	= sizeof(struct sha1_state),
	.base		= {
		.cra_name	= "sha1",
		.cra_driver_name = "sha1-ssse3",
		.cra_priority	= 150,
		.cra_blocksize	= SHA1_BLOCK_SIZE,
		.cra_module	= THIS_MODULE,
	}
};

static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

asmlinkage void sha1_transform_avx(struct sha1_state *state,
				   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			   unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			  unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize	= SHA1_DIGEST_SIZE,
	.init		= sha1_base_init,
	.update		= sha1_avx_update,
	.final		= sha1_avx_final,
	.finup		= sha1_avx_finup,
	.descsize	= sizeof(struct sha1_state),
	.base		= {
		.cra_name	= "sha1",
		.cra_driver_name = "sha1-avx",
		.cra_priority	= 160,
		.cra_blocksize	= SHA1_BLOCK_SIZE,
		.cra_module	= THIS_MODULE,
	}
};
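
/*
 * AVX is only usable when the kernel saves and restores the extended SSE
 * and YMM register state (XSAVE); the CPUID AVX bit alone is not enough,
 * hence the cpu_has_xfeatures() check below.
 */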
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}

#define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
				    const u8 *data, int blocks);

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
		&& boot_cpu_has(X86_FEATURE_BMI1)
		&& boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}
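
/*
 * The AVX2 code path only pays off for inputs of at least
 * SHA1_AVX2_BLOCK_OPTSIZE (4) blocks; smaller updates are routed to the
 * AVX transform instead.
 */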
static void sha1_apply_transform_avx2(struct sha1_state *state,
				      const u8 *data, int blocks)
{
	/* Select the optimal transform based on data block size */
	if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(state, data, blocks);
	else
		sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize	= SHA1_DIGEST_SIZE,
	.init		= sha1_base_init,
	.update		= sha1_avx2_update,
	.final		= sha1_avx2_final,
	.finup		= sha1_avx2_finup,
	.descsize	= sizeof(struct sha1_state),
	.base		= {
		.cra_name	= "sha1",
		.cra_driver_name = "sha1-avx2",
		.cra_priority	= 170,
		.cra_blocksize	= SHA1_BLOCK_SIZE,
		.cra_module	= THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}
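
/*
 * The SHA-NI (SHA extensions) glue is only built when the assembler can
 * emit the SHA-NI instructions (CONFIG_AS_SHA1_NI).
 */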
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
				  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			 unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize	= SHA1_DIGEST_SIZE,
	.init		= sha1_base_init,
	.update		= sha1_ni_update,
	.final		= sha1_ni_final,
	.finup		= sha1_ni_finup,
	.descsize	= sizeof(struct sha1_state),
	.base		= {
		.cra_name	= "sha1",
		.cra_driver_name = "sha1-ni",
		.cra_priority	= 250,
		.cra_blocksize	= SHA1_BLOCK_SIZE,
		.cra_module	= THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}
#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif
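
/*
 * Register each implementation only if the CPU supports it.  On any
 * registration failure the already registered variants are unregistered
 * again and the module load fails with -ENODEV.
 */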
static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif