/*
 * Key Wrapping: RFC3394 / NIST SP800-38F
 *
 * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, and the entire permission notice in its entirety,
 *    including the disclaimer of warranties.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote
 *    products derived from this software without specific prior
 *    written permission.
 *
 * ALTERNATIVELY, this product may be distributed under the terms of
 * the GNU General Public License, in which case the provisions of the GPL2
 * are required INSTEAD OF the above restrictions.  (This clause is
 * necessary due to a potential bad interaction between the GPL and
 * the restrictions contained in a BSD-style copyright.)
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
 * WHICH ARE HEREBY DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */
/*
 * Note for using key wrapping:
 *
 *	* The result of the encryption operation is the ciphertext starting
 *	  with the 2nd semiblock. The first semiblock is provided as the IV.
 *	  The IV used to start the encryption operation is the default IV.
 *
 *	* The input for the decryption is the first semiblock handed in as an
 *	  IV. The ciphertext is the data starting with the 2nd semiblock. The
 *	  return code of the decryption operation will be EBADMSG in case an
 *	  integrity error occurs.
 *
 * To obtain the full result of an encryption as expected by SP800-38F, the
 * caller must allocate a buffer of plaintext + 8 bytes:
 *
 *	unsigned int datalen = ptlen + crypto_skcipher_ivsize(tfm);
 *	u8 data[datalen];
 *	u8 *iv = data;
 *	u8 *pt = data + crypto_skcipher_ivsize(tfm);
 *	<ensure that pt contains the plaintext of size ptlen>
 *	sg_init_one(&sg, pt, ptlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 *	==> After encryption, data now contains the full KW result as per
 *	    SP800-38F.
 *
 * In case of decryption, the ciphertext already has the expected length
 * and must be segmented appropriately:
 *
 *	unsigned int datalen = CTLEN;
 *	u8 data[datalen];
 *	<ensure that data contains the full ciphertext>
 *	u8 *iv = data;
 *	u8 *ct = data + crypto_skcipher_ivsize(tfm);
 *	unsigned int ctlen = datalen - crypto_skcipher_ivsize(tfm);
 *	sg_init_one(&sg, ct, ctlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ctlen, iv);
 *
 *	==> After decryption (which hopefully does not return EBADMSG), the ct
 *	    pointer now points to the plaintext of size ctlen.
 *
 * Note 2: KWP is not implemented, as this would defy in-place operation.
 *	   A caller who wants to wrap non-aligned data should simply pad
 *	   the input with zeros up to the next 8-byte boundary.
 */
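/*
 * A minimal end-to-end sketch of the call sequence described above,
 * assuming a synchronous "kw(aes)" instance. Error handling is
 * abbreviated, and the identifiers wrapbuf, keydata, plaintext and
 * ptlen are illustrative only, not part of this file's API:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 *wrapbuf, *iv, *pt;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("kw(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, keydata, 16);
 *
 *	wrapbuf = kmalloc(ptlen + crypto_skcipher_ivsize(tfm), GFP_KERNEL);
 *	iv = wrapbuf;
 *	pt = wrapbuf + crypto_skcipher_ivsize(tfm);
 *	memcpy(pt, plaintext, ptlen);
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, pt, ptlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *	err = crypto_skcipher_encrypt(req);
 *
 *	==> On success, wrapbuf holds IV || ciphertext, i.e. the full
 *	    SP800-38F KW result of ptlen + 8 bytes.
 */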
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
struct crypto_kw_block {
#define SEMIBSIZE 8
	__be64 A;
	__be64 R;
};
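/*
 * Scratch block for one wrap step: A mirrors the integrity-check
 * register A of SP800-38F, while R holds the semiblock R[i] currently
 * being processed. Keeping the two adjacent lets the pair be fed to the
 * underlying block cipher as a single 16-byte block (A || R[i]), which
 * is why crypto_kw_create() below insists on a 16-byte cra_blocksize.
 */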
/*
 * Fast-forward the SGL to the position "end" minus SEMIBSIZE, i.e. to
 * the start of the last full semiblock within the first "end" bytes.
 * The resulting position in the SGL is returned via the walk variable.
 */
static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
				     struct scatterlist *sg,
				     unsigned int end)
{
	unsigned int skip = 0;

	/* The caller should only operate on full SEMIBLOCKs. */
	BUG_ON(end < SEMIBSIZE);

	skip = end - SEMIBSIZE;
	while (sg) {
		if (sg->length > skip) {
			scatterwalk_start(walk, sg);
			scatterwalk_advance(walk, skip);
			break;
		}

		skip -= sg->length;
		sg = sg_next(sg);
	}
}
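/*
 * Note on traversal order: unwrapping processes the semiblocks from the
 * last one towards the first. This is why each inner-loop iteration of
 * crypto_kw_decrypt() below re-fast-forwards the walk to offset
 * nbytes - SEMIBSIZE instead of walking the SGL linearly.
 */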
static int crypto_kw_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	u64 t = 6 * ((req->cryptlen) >> 3);
	unsigned int i;
	int ret = 0;
	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV).
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;
	/* Place the IV into block A */
	memcpy(&block.A, req->iv, SEMIBSIZE);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		while (nbytes) {
			/* move pointer by nbytes in the SGL */
			crypto_kw_scatterlist_ff(&src_walk, src, nbytes);
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t--;
			/* perform KW operation: decrypt block */
			crypto_cipher_decrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);

			/* move pointer by nbytes in the SGL */
			crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes);
			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* Perform authentication check */
	if (block.A != cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL))
		ret = -EBADMSG;

	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return ret;
}
static int crypto_kw_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct crypto_kw_block block;
	struct scatterlist *src, *dst;
	u64 t = 1;
	unsigned int i;
	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is the IV that occupies the first semiblock).
	 * This means that the dst memory must be one semiblock larger than
	 * src. Also ensure that the given data is aligned to the semiblock
	 * size.
	 */
	if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
		return -EINVAL;
	/*
	 * Place the predefined IV into block A -- for encrypt, the caller
	 * does not need to provide an IV, but must fetch the final IV
	 * afterwards.
	 */
	block.A = cpu_to_be64(0xa6a6a6a6a6a6a6a6ULL);

	/*
	 * src scatterlist is read-only. dst scatterlist is r/w. During the
	 * first loop, src points to req->src and dst to req->dst. For any
	 * subsequent round, the code operates on req->dst only.
	 */
	src = req->src;
	dst = req->dst;

	for (i = 0; i < 6; i++) {
		struct scatter_walk src_walk, dst_walk;
		unsigned int nbytes = req->cryptlen;

		scatterwalk_start(&src_walk, src);
		scatterwalk_start(&dst_walk, dst);

		while (nbytes) {
			/* get the source block */
			scatterwalk_copychunks(&block.R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: encrypt block */
			crypto_cipher_encrypt_one(cipher, (u8 *)&block,
						  (u8 *)&block);
			/* perform KW operation: modify IV with counter */
			block.A ^= cpu_to_be64(t);
			t++;

			/* Copy block->R into place */
			scatterwalk_copychunks(&block.R, &dst_walk, SEMIBSIZE,
					       true);

			nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		src = req->dst;
		dst = req->dst;
	}

	/* establish the IV for the caller to pick up */
	memcpy(req->iv, &block.A, SEMIBSIZE);

	memzero_explicit(&block, sizeof(struct crypto_kw_block));

	return 0;
}
static int crypto_kw_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	/* Section 5.1 requirement for KW */
	if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
		goto out_free_inst;

	inst->alg.base.cra_blocksize = SEMIBSIZE;
	inst->alg.base.cra_alignmask = 0;
	inst->alg.ivsize = SEMIBSIZE;

	inst->alg.encrypt = crypto_kw_encrypt;
	inst->alg.decrypt = crypto_kw_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}
static struct crypto_template crypto_kw_tmpl = {
	.name = "kw",
	.create = crypto_kw_create,
	.module = THIS_MODULE,
};

static int __init crypto_kw_init(void)
{
	return crypto_register_template(&crypto_kw_tmpl);
}

static void __exit crypto_kw_exit(void)
{
	crypto_unregister_template(&crypto_kw_tmpl);
}

subsys_initcall(crypto_kw_init);
module_exit(crypto_kw_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>");
MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)");
MODULE_ALIAS_CRYPTO("kw");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);