// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for SHA256 hashing optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon crypto/sha256_generic.c
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>

#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

asmlinkage void sha256_sparc64_transform(u32 *digest, const char *data,
					 unsigned int rounds);

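/*
 * Feed @len bytes at @data into the hash state.  @partial is the number of
 * bytes already buffered in sctx->buf; once that buffer holds a complete
 * 64-byte block it is run through the sha256 opcode, then all remaining
 * whole blocks are processed directly from @data and any tail is copied
 * back into the buffer for the next call.
 */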
static void __sha256_sparc64_update(struct sha256_state *sctx, const u8 *data,
				    unsigned int len, unsigned int partial)
{
	unsigned int done = 0;

	sctx->count += len;
	if (partial) {
		done = SHA256_BLOCK_SIZE - partial;
		memcpy(sctx->buf + partial, data, done);
		sha256_sparc64_transform(sctx->state, sctx->buf, 1);
	}
	if (len - done >= SHA256_BLOCK_SIZE) {
		const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;

		sha256_sparc64_transform(sctx->state, data + done, rounds);
		done += rounds * SHA256_BLOCK_SIZE;
	}

	memcpy(sctx->buf, data + done, len - done);
}

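/*
 * .update handler: inputs that still fit in the partially filled block are
 * just buffered; anything longer goes through the block-processing helper
 * above.
 */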
static int sha256_sparc64_update(struct shash_desc *desc, const u8 *data,
				 unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

	/* Handle the fast case right here */
	if (partial + len < SHA256_BLOCK_SIZE) {
		sctx->count += len;
		memcpy(sctx->buf + partial, data, len);
	} else
		__sha256_sparc64_update(sctx, data, len, partial);

	return 0;
}

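/*
 * .final handler: apply the standard SHA-256 padding (0x80, zeroes, then
 * the 64-bit big-endian bit count), emit the state as a big-endian digest
 * and wipe the context.
 */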
static int sha256_sparc64_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA256_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE + 56) - index);

	/* We need to fill a whole block for __sha256_sparc64_update() */
	if (padlen <= 56) {
		sctx->count += padlen;
		memcpy(sctx->buf + index, padding, padlen);
	} else {
		__sha256_sparc64_update(sctx, padding, padlen, index);
	}
	__sha256_sparc64_update(sctx, (const u8 *)&bits, sizeof(bits), 56);

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}

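/*
 * SHA-224 uses the same transform: compute the full SHA-256 digest into a
 * stack buffer, copy out the first 28 bytes and scrub the rest.
 */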
static int sha224_sparc64_final(struct shash_desc *desc, u8 *hash)
{
	u8 D[SHA256_DIGEST_SIZE];

	sha256_sparc64_final(desc, D);

	memcpy(hash, D, SHA224_DIGEST_SIZE);
	memzero_explicit(D, SHA256_DIGEST_SIZE);

	return 0;
}

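/*
 * Export/import simply copy the raw struct sha256_state, which is also
 * what .statesize advertises below.
 */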
static int sha256_sparc64_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));
	return 0;
}

static int sha256_sparc64_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));
	return 0;
}

static struct shash_alg sha256_alg = {
	.digestsize = SHA256_DIGEST_SIZE,
	.init       = sha256_base_init,
	.update     = sha256_sparc64_update,
	.final      = sha256_sparc64_final,
	.export     = sha256_sparc64_export,
	.import     = sha256_sparc64_import,
	.descsize   = sizeof(struct sha256_state),
	.statesize  = sizeof(struct sha256_state),
	.base       = {
		.cra_name        = "sha256",
		.cra_driver_name = "sha256-sparc64",
		.cra_priority    = SPARC_CR_OPCODE_PRIORITY,
		.cra_blocksize   = SHA256_BLOCK_SIZE,
		.cra_module      = THIS_MODULE,
	}
};

static struct shash_alg sha224_alg = {
	.digestsize = SHA224_DIGEST_SIZE,
	.init       = sha224_base_init,
	.update     = sha256_sparc64_update,
	.final      = sha224_sparc64_final,
	.descsize   = sizeof(struct sha256_state),
	.base       = {
		.cra_name        = "sha224",
		.cra_driver_name = "sha224-sparc64",
		.cra_priority    = SPARC_CR_OPCODE_PRIORITY,
		.cra_blocksize   = SHA224_BLOCK_SIZE,
		.cra_module      = THIS_MODULE,
	}
};

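/*
 * The sha256 opcode is only usable when the CPU advertises the crypto
 * hwcap and the Configuration Feature Register (%asr26) has its SHA256
 * bit set.
 */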
static bool __init sparc64_has_sha256_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_SHA256))
		return false;

	return true;
}

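/*
 * Register sha224 first and sha256 second; if the second registration
 * fails the first is unwound, so the module is never left half-registered.
 */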
static int __init sha256_sparc64_mod_init(void)
{
	if (sparc64_has_sha256_opcode()) {
		int ret = crypto_register_shash(&sha224_alg);
		if (ret < 0)
			return ret;

		ret = crypto_register_shash(&sha256_alg);
		if (ret < 0) {
			crypto_unregister_shash(&sha224_alg);
			return ret;
		}

		pr_info("Using sparc64 sha256 opcode optimized SHA-256/SHA-224 implementation\n");
		return 0;
	}
	pr_info("sparc64 sha256 opcode not available.\n");
	return -ENODEV;
}

static void __exit sha256_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&sha224_alg);
	crypto_unregister_shash(&sha256_alg);
}

module_init(sha256_sparc64_mod_init);
module_exit(sha256_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm, sparc64 sha256 opcode accelerated");

MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

#include "crop_devid.c"