/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};
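
/*
 * Layout note with a usage sketch (names below are hypothetical, not
 * part of this header): head[] pads s.base so that it overlays
 * alg.base exactly, letting the same memory be viewed either as a
 * struct crypto_instance or as the base of a struct aead_alg.  A
 * template typically allocates its private context right after the
 * instance:
 *
 *	struct my_tmpl_ctx {
 *		struct crypto_aead_spawn spawn;
 *	};
 *
 *	inst = kzalloc(sizeof(*inst) + sizeof(struct my_tmpl_ctx),
 *		       GFP_KERNEL);
 */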

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_aead_ctx_dma(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}
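
/*
 * Usage sketch (hypothetical driver code): an implementation declares a
 * private context, sized via cra_ctxsize, and fetches it from the tfm
 * in its callbacks:
 *
 *	struct my_aead_ctx {
 *		u32 key[8];
 *	};
 *
 *	static int my_setkey(struct crypto_aead *tfm, const u8 *key,
 *			     unsigned int keylen)
 *	{
 *		struct my_aead_ctx *ctx = crypto_aead_ctx(tfm);
 *		...
 *	}
 *
 * Drivers that DMA into the context use crypto_aead_ctx_dma() instead,
 * which returns a pointer aligned for the platform's DMA requirements.
 */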

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void *aead_request_ctx_dma(struct aead_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(aead_request_ctx(req), align);
}
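
/*
 * Note on aead_request_ctx_dma(): when crypto_dma_align() is no
 * stricter than crypto_tfm_ctx_alignment(), the request context is
 * already suitably aligned and is returned unchanged; otherwise the
 * pointer is rounded up to the DMA alignment.  The headroom this
 * consumes must have been reserved with crypto_aead_set_reqsize_dma().
 */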

static inline void aead_request_complete(struct aead_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
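
/*
 * Usage sketch (hypothetical template ->create() code): the spawn is
 * grabbed while the instance is built and dropped on the error path or
 * from the instance's ->free() callback:
 *
 *	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
 *
 *	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
 *			       cipher_name, 0, mask);
 *	if (err)
 *		goto err_free_inst;
 */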

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}

static inline void crypto_aead_set_reqsize_dma(struct crypto_aead *aead,
					       unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	aead->reqsize = reqsize;
}
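
/*
 * The _dma variant reserves extra headroom for the pointer adjustment
 * done by aead_request_ctx_dma(); the masked addition adds no headroom
 * when crypto_dma_align() is smaller than crypto_tfm_ctx_alignment().
 * Usage sketch (hypothetical ->init callback, my_req_ctx is
 * illustrative):
 *
 *	static int my_aead_init(struct crypto_aead *tfm)
 *	{
 *		crypto_aead_set_reqsize_dma(tfm, sizeof(struct my_req_ctx));
 *		return 0;
 *	}
 */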

static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}
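
/*
 * Example (illustrative): AES-CCM advertises a block size of one byte,
 * but its counter IV only advances per 16-byte AES block, so its
 * chunksize is 16.  A caller splitting work across updates would read
 *
 *	unsigned int chunk = crypto_aead_chunksize(tfm);
 *
 * and supply partial updates in multiples of that many bytes.
 */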

int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);

#endif	/* _CRYPTO_INTERNAL_AEAD_H */