/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

/* \file cc_driver.h
* ARM CryptoCell Linux Crypto Driver
*/

#ifndef __CC_DRIVER_H__
#define __CC_DRIVER_H__

#ifdef COMP_IN_WQ
#include <linux/workqueue.h>
#else
#include <linux/interrupt.h>
#endif
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/aead.h>
#include <crypto/authenc.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/clk.h>
#include <linux/platform_device.h>

#include "cc_host_regs.h"
#include "cc_crypto_ctx.h"
#include "cc_hw_queue_defs.h"
#include "cc_sram_mgr.h"

extern bool cc_dump_desc;
extern bool cc_dump_bytes;

#define DRV_MODULE_VERSION "5.0"

enum cc_hw_rev {
	CC_HW_REV_630 = 630,
	CC_HW_REV_710 = 710,
	CC_HW_REV_712 = 712,
	CC_HW_REV_713 = 713
};

enum cc_std_body {
	CC_STD_NIST = 0x1,
	CC_STD_OSCCA = 0x2,
	CC_STD_ALL = 0x3
};

#define CC_PINS_FULL	0x0
#define CC_PINS_SLIM	0x9F

/* Maximum DMA mask supported by IP */
#define DMA_BIT_MASK_LEN 48

#define CC_AXI_IRQ_MASK ((1 << CC_AXIM_CFG_BRESPMASK_BIT_SHIFT) | \
			 (1 << CC_AXIM_CFG_RRESPMASK_BIT_SHIFT) | \
			 (1 << CC_AXIM_CFG_INFLTMASK_BIT_SHIFT) | \
			 (1 << CC_AXIM_CFG_COMPMASK_BIT_SHIFT))

#define CC_AXI_ERR_IRQ_MASK BIT(CC_HOST_IRR_AXI_ERR_INT_BIT_SHIFT)

#define CC_COMP_IRQ_MASK BIT(CC_HOST_IRR_AXIM_COMP_INT_BIT_SHIFT)

#define CC_SECURITY_DISABLED_MASK BIT(CC_SECURITY_DISABLED_VALUE_BIT_SHIFT)

#define CC_NVM_IS_IDLE_MASK BIT(CC_NVM_IS_IDLE_VALUE_BIT_SHIFT)

#define AXIM_MON_COMP_VALUE CC_GENMASK(CC_AXIM_MON_COMP_VALUE)

#define CC_CPP_AES_ABORT_MASK ( \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_0_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_1_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_2_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_3_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_4_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_5_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_6_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_AES_7_MASK_BIT_SHIFT))

#define CC_CPP_SM4_ABORT_MASK ( \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_0_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_1_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_2_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_3_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_4_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_5_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_6_MASK_BIT_SHIFT) | \
	BIT(CC_HOST_IMR_REE_OP_ABORTED_SM_7_MASK_BIT_SHIFT))

/* Register name mangling macro */
#define CC_REG(reg_name) CC_ ## reg_name ## _REG_OFFSET
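/*
 * For illustration (not part of the original header): CC_REG(HOST_IRR) is
 * expected to expand to CC_HOST_IRR_REG_OFFSET, i.e. the register offset
 * constant coming from cc_host_regs.h.
 */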

/* TEE FIPS status interrupt */
#define CC_GPR0_IRQ_MASK BIT(CC_HOST_IRR_GPR0_BIT_SHIFT)

#define CC_CRA_PRIO 400

#define MIN_HW_QUEUE_SIZE 50 /* Minimum size required for proper function */

#define MAX_REQUEST_QUEUE_SIZE 4096
#define MAX_MLLI_BUFF_SIZE 2080

/* Definitions for HW descriptors DIN/DOUT fields */
#define NS_BIT 1
#define AXI_ID 0
/* AXI_ID is not actually the AXI ID of the transaction but the value of the
 * AXI_ID field in the HW descriptor. The DMA engine adds 8 to that value.
 */

struct cc_cpp_req {
	bool is_cpp;
	enum cc_cpp_alg alg;
	u8 slot;
};

#define CC_MAX_IVGEN_DMA_ADDRESSES	3
struct cc_crypto_req {
	void (*user_cb)(struct device *dev, void *req, int err);
	void *user_arg;
	struct completion seq_compl; /* request completion */
	struct cc_cpp_req cpp;
};
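
/*
 * Illustrative (non-normative) sketch: a caller typically fills user_cb and
 * user_arg so the request manager can invoke the callback once the HW
 * descriptor sequence completes. "my_complete" and "req" below are
 * hypothetical names used only for this example:
 *
 *	struct cc_crypto_req cc_req = {};
 *
 *	cc_req.user_cb = my_complete;
 *	cc_req.user_arg = req;
 */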

/**
 * struct cc_drvdata - driver private data context
 * @cc_base:	virt address of the CC registers
 * @irq:	bitmap indicating source of last interrupt
 */
struct cc_drvdata {
	void __iomem *cc_base;
	int irq;
	struct completion hw_queue_avail; /* wait for HW queue availability */
	struct platform_device *plat_dev;
	u32 mlli_sram_addr;
	struct dma_pool *mlli_buffs_pool;
	struct list_head alg_list;
	void *hash_handle;
	void *aead_handle;
	void *request_mgr_handle;
	void *fips_handle;
	u32 sram_free_offset;	/* offset to non-allocated area in SRAM */
	struct dentry *dir;	/* for debugfs */
	struct clk *clk;
	bool coherent;
	char *hw_rev_name;
	enum cc_hw_rev hw_rev;
	u32 axim_mon_offset;
	u32 sig_offset;
	u32 ver_offset;
	int std_bodies;
	bool sec_disabled;
	u32 comp_mask;
	u32 cache_params;
	u32 ace_const;
};

struct cc_crypto_alg {
	struct list_head entry;
	int cipher_mode;
	int flow_mode; /* Note: currently, refers to the cipher mode only. */
	int auth_mode;
	struct cc_drvdata *drvdata;
	struct skcipher_alg skcipher_alg;
	struct aead_alg aead_alg;
};

struct cc_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	union {
		struct skcipher_alg skcipher;
		struct aead_alg aead;
	} template_u;
	int cipher_mode;
	int flow_mode; /* Note: currently, refers to the cipher mode only. */
	int auth_mode;
	u32 min_hw_rev;
	enum cc_std_body std_body;
	bool sec_func;
	unsigned int data_unit;
	struct cc_drvdata *drvdata;
};

struct async_gen_req_ctx {
	dma_addr_t iv_dma_addr;
	u8 *iv;
	enum drv_crypto_direction op_type;
};

static inline struct device *drvdata_to_dev(struct cc_drvdata *drvdata)
{
	return &drvdata->plat_dev->dev;
}
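
/*
 * Illustrative use (sketch, not from the original header): this helper is
 * convenient for device-tagged logging, e.g.
 *
 *	dev_dbg(drvdata_to_dev(drvdata), "HW queue is busy\n");
 */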

void __dump_byte_array(const char *name, const u8 *buf, size_t len);
static inline void dump_byte_array(const char *name, const u8 *the_array,
				   size_t size)
{
	if (cc_dump_bytes)
		__dump_byte_array(name, the_array, size);
}
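
/*
 * Illustrative use (sketch): the dump is gated by the cc_dump_bytes module
 * parameter, so a call can stay in place unconditionally. "iv" and "ivsize"
 * are placeholder names for this example:
 *
 *	dump_byte_array("iv", iv, ivsize);
 */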

bool cc_wait_for_reset_completion(struct cc_drvdata *drvdata);
int init_cc_regs(struct cc_drvdata *drvdata);
void fini_cc_regs(struct cc_drvdata *drvdata);
unsigned int cc_get_default_hash_len(struct cc_drvdata *drvdata);

static inline void cc_iowrite(struct cc_drvdata *drvdata, u32 reg, u32 val)
{
	iowrite32(val, (drvdata->cc_base + reg));
}

static inline u32 cc_ioread(struct cc_drvdata *drvdata, u32 reg)
{
	return ioread32(drvdata->cc_base + reg);
}
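
/*
 * Illustrative use (sketch): register accesses combine these accessors with
 * the CC_REG() name-mangling macro, e.g. clearing and re-reading interrupt
 * status. HOST_ICR and HOST_IRR are assumed register names from
 * cc_host_regs.h:
 *
 *	cc_iowrite(drvdata, CC_REG(HOST_ICR), CC_COMP_IRQ_MASK);
 *	irr = cc_ioread(drvdata, CC_REG(HOST_IRR));
 */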

static inline gfp_t cc_gfp_flags(struct crypto_async_request *req)
{
	return (req->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		GFP_KERNEL : GFP_ATOMIC;
}
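
/*
 * Illustrative use (sketch): pick allocation flags that match the crypto API
 * request context. "req", "len" and "buf" are placeholder names:
 *
 *	gfp_t flags = cc_gfp_flags(&req->base);
 *	void *buf = kmalloc(len, flags);
 */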

static inline void set_queue_last_ind(struct cc_drvdata *drvdata,
				      struct cc_hw_desc *pdesc)
{
	if (drvdata->hw_rev >= CC_HW_REV_712)
		set_queue_last_ind_bit(pdesc);
}

#endif /*__CC_DRIVER_H__*/