/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (c) 2015-2016 MediaTek Inc.
 * Author: Honghui Zhang <honghui.zhang@mediatek.com>
 */
#ifndef _MTK_IOMMU_H_
#define _MTK_IOMMU_H_

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/device.h>
#include <linux/io.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/dma-mapping.h>
#include <soc/mediatek/smi.h>
#include <dt-bindings/memory/mtk-memory-port.h>

#define MTK_LARB_COM_MAX	8
#define MTK_LARB_SUBCOM_MAX	4

#define MTK_IOMMU_GROUP_MAX	8
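
/*
 * M4U register values saved across suspend and restored on resume; the
 * suspend/resume callbacks in mtk_iommu.c are expected to fill in and
 * write back these fields (which registers exist depends on the SoC
 * generation, hence the v1/v2 union).
 */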
struct mtk_iommu_suspend_reg {
	union {
		u32		standard_axi_mode;	/* v1 */
		u32		misc_ctrl;		/* v2 */
	};
	u32			dcm_dis;
	u32			ctrl_reg;
	u32			int_control0;
	u32			int_main_control;
	u32			ivrp_paddr;
	u32			vld_pa_rng;
	u32			wr_len_ctrl;
};
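
/* SoC generations ("platforms") whose M4U this driver can program. */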
enum mtk_iommu_plat {
	M4U_MT2701,
	M4U_MT2712,
	M4U_MT6779,
	M4U_MT8167,
	M4U_MT8173,
	M4U_MT8183,
	M4U_MT8192,
};

struct mtk_iommu_iova_region;
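
/*
 * Per-SoC configuration, normally supplied as of_device_id match data from
 * mtk_iommu.c: the M4U generation, feature flags, the offset of the TLB
 * invalidate-select register, the supported IOVA regions and the larb ID
 * remapping table.
 */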
struct mtk_iommu_plat_data {
	enum mtk_iommu_plat	m4u_plat;
	u32			flags;
	u32			inv_sel_reg;

	unsigned int		iova_region_nr;
	const struct mtk_iommu_iova_region	*iova_region;

	unsigned char		larbid_remap[MTK_LARB_COM_MAX][MTK_LARB_SUBCOM_MAX];
};

struct mtk_iommu_domain;
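
/*
 * Runtime state of one M4U instance: MMIO base, fault interrupt, bus clock,
 * the attached domain/groups, registers saved for suspend, and the SMI
 * larbs bound to this IOMMU via the component framework.
 */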
struct mtk_iommu_data {
	void __iomem			*base;
	int				irq;
	struct device			*dev;
	struct clk			*bclk;
	phys_addr_t			protect_base; /* protect memory base */
	struct mtk_iommu_suspend_reg	reg;
	struct mtk_iommu_domain		*m4u_dom;
	struct iommu_group		*m4u_group[MTK_IOMMU_GROUP_MAX];
	bool				enable_4GB;
	spinlock_t			tlb_lock; /* lock for tlb range flush */

	struct iommu_device		iommu;
	const struct mtk_iommu_plat_data *plat_data;
	struct device			*smicomm_dev;

	struct dma_iommu_mapping	*mapping; /* For mtk_iommu_v1.c */

	struct list_head		list;
	struct mtk_smi_larb_iommu	larb_imu[MTK_LARB_NR_MAX];
};
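
/*
 * Component-master helpers: bind/unbind every SMI larb added to this
 * IOMMU's component match list. A minimal sketch of how the driver side is
 * expected to wire them up (mtk_iommu.c registers an equivalent
 * component_master_ops and passes it to component_master_add_with_match()
 * at probe time):
 *
 *	static const struct component_master_ops mtk_iommu_com_ops = {
 *		.bind	= mtk_iommu_bind,
 *		.unbind	= mtk_iommu_unbind,
 *	};
 */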
static inline int mtk_iommu_bind(struct device *dev)
{
	struct mtk_iommu_data *data = dev_get_drvdata(dev);

	return component_bind_all(dev, &data->larb_imu);
}

static inline void mtk_iommu_unbind(struct device *dev)
{
	struct mtk_iommu_data *data = dev_get_drvdata(dev);

	component_unbind_all(dev, &data->larb_imu);
}

#endif