Pull crypto updates from Herbert Xu:
 "Here is the crypto update for 5.3:

  API:
   - Test shash interface directly in testmgr
   - cra_driver_name is now mandatory

  Algorithms:
   - Replace arc4 crypto_cipher with library helper
   - Implement 5 way interleave for ECB, CBC and CTR on arm64
   - Add xxhash
   - Add continuous self-test on noise source to drbg
   - Update jitter RNG

  Drivers:
   - Add support for SHA204A random number generator
   - Add support for 7211 in iproc-rng200
   - Fix fuzz test failures in inside-secure
   - Fix fuzz test failures in talitos
   - Fix fuzz test failures in qat"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (143 commits)
  crypto: stm32/hash - remove interruptible condition for dma
  crypto: stm32/hash - Fix hmac issue more than 256 bytes
  crypto: stm32/crc32 - rename driver file
  crypto: amcc - remove memset after dma_alloc_coherent
  crypto: ccp - Switch to SPDX license identifiers
  crypto: ccp - Validate the the error value used to index error messages
  crypto: doc - Fix formatting of new crypto engine content
  crypto: doc - Add parameter documentation
  crypto: arm64/aes-ce - implement 5 way interleave for ECB, CBC and CTR
  crypto: arm64/aes-ce - add 5 way interleave routines
  crypto: talitos - drop icv_ool
  crypto: talitos - fix hash on SEC1.
  crypto: talitos - move struct talitos_edesc into talitos.h
  lib/scatterlist: Fix mapping iterator when sg->offset is greater than PAGE_SIZE
  crypto/NX: Set receive window credits to max number of CRBs in RxFIFO
  crypto: asymmetric_keys - select CRYPTO_HASH where needed
  crypto: serpent - mark __serpent_setkey_sbox noinline
  crypto: testmgr - dynamically allocate crypto_shash
  crypto: testmgr - dynamically allocate testvec_config
  crypto: talitos - eliminate unneeded 'done' functions at build time
  ...
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it.  XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

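/*
 * TEA works on a 64-bit block held as two little-endian 32-bit halves
 * (y, z).  Each of the TEA_ROUNDS rounds first advances 'sum' by
 * TEA_DELTA and then mixes one half into the other with shifts,
 * additions and XORs keyed by KEY[0..3].
 */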
static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

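/*
 * XTEA keeps TEA's block size, key size and delta but uses a revised
 * round function: the subkey is selected from KEY[] by bits of the
 * running 'sum' (sum & 3 for the first half-round, (sum >> 11) & 3
 * after 'sum' has been advanced by XTEA_DELTA).
 */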
static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[(sum >> 11) & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[(sum >> 11) & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

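/*
 * XETA is XTEA with the round expression grouped in the "wrong" order:
 * XTEA computes ((z << 4 ^ z >> 5) + z) ^ (sum + KEY[sum & 3]), while
 * XETA computes (z << 4 ^ z >> 5) + (z ^ sum) + KEY[sum & 3].  It is
 * provided only for compatibility with implementations that made this
 * mistake (see the header comment above).
 */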
static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[(sum >> 11) & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[(sum >> 11) & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

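/*
 * All three algorithms are exposed through the single-block cipher
 * interface (cra_u.cipher): a fixed 128-bit key, a 64-bit block and no
 * chaining.  Each entry also sets cra_driver_name ("<name>-generic"),
 * which the crypto API now treats as mandatory, alongside the
 * user-visible cra_name.
 */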
static struct crypto_alg tea_algs[3] = { {
	.cra_name		=	"tea",
	.cra_driver_name	=	"tea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct tea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		=	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
}, {
	.cra_name		=	"xtea",
	.cra_driver_name	=	"xtea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
}, {
	.cra_name		=	"xeta",
	.cra_driver_name	=	"xeta-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xeta_encrypt,
	.cia_decrypt		=	xeta_decrypt } }
} };

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

MODULE_ALIAS_CRYPTO("tea");
MODULE_ALIAS_CRYPTO("xtea");
MODULE_ALIAS_CRYPTO("xeta");

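/*
 * Registered at subsys_initcall() time rather than module_init() so
 * that, when built in, the generic ciphers are available before
 * device_initcall()-level users run.
 */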
subsys_initcall(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
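For context, here is a minimal sketch of how one of these ciphers might be exercised from a separate kernel module through the single-block cipher API (crypto_alloc_cipher(), crypto_cipher_setkey(), crypto_cipher_encrypt_one(), crypto_cipher_decrypt_one()). The module name, the demo key and plaintext bytes, and the xtea_demo_init/xtea_demo_exit function names are made up for this illustration; only the crypto API calls come from the kernel, and real testing of these ciphers is done by testmgr against its own vectors.

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/string.h>

static int __init xtea_demo_init(void)
{
	struct crypto_cipher *tfm;
	u8 key[16], pt[8], ct[8], back[8];
	int err;

	/* Arbitrary demo key and one 64-bit plaintext block. */
	memset(key, 0x42, sizeof(key));
	memset(pt, 0xab, sizeof(pt));

	/* Ask the crypto API for the "xtea" single-block cipher. */
	tfm = crypto_alloc_cipher("xtea", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	/* Encrypt one block and decrypt it again; no chaining mode here. */
	crypto_cipher_encrypt_one(tfm, ct, pt);
	crypto_cipher_decrypt_one(tfm, back, ct);

	pr_info("xtea demo: round-trip %s\n",
		memcmp(pt, back, sizeof(pt)) ? "FAILED" : "ok");
out:
	crypto_free_cipher(tfm);
	return err;
}

static void __exit xtea_demo_exit(void)
{
}

module_init(xtea_demo_init);
module_exit(xtea_demo_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Hypothetical XTEA round-trip demo");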