2019-05-28 10:10:04 -07:00
/* SPDX-License-Identifier: GPL-2.0-only */
2013-03-22 16:34:01 +02:00
/*
2016-11-08 19:51:33 +02:00
* Copyright ( c ) 2012 - 2015 , NVIDIA Corporation .
2013-03-22 16:34:01 +02:00
*/
# ifndef HOST1X_DEV_H
# define HOST1X_DEV_H
# include <linux/device.h>
2016-12-14 13:16:14 +02:00
# include <linux/iommu.h>
# include <linux/iova.h>
2017-03-21 08:54:21 +01:00
# include <linux/platform_device.h>
2017-03-21 08:54:22 +01:00
# include <linux/reset.h>
2013-03-22 16:34:01 +02:00
2017-03-21 08:54:21 +01:00
# include "cdma.h"
2013-03-22 16:34:03 +02:00
# include "channel.h"
2013-03-22 16:34:02 +02:00
# include "intr.h"
2013-03-22 16:34:03 +02:00
# include "job.h"
2017-03-21 08:54:21 +01:00
# include "syncpt.h"
2013-03-22 16:34:01 +02:00
/* Forward declarations; full definitions live in the sibling headers. */
struct host1x_syncpt;
struct host1x_syncpt_base;
struct host1x_channel;
struct host1x_cdma;
struct host1x_job;
struct push_buffer;
struct output;
struct dentry;
2013-03-22 16:34:03 +02:00
struct host1x_channel_ops {
int ( * init ) ( struct host1x_channel * channel , struct host1x * host ,
unsigned int id ) ;
int ( * submit ) ( struct host1x_job * job ) ;
} ;
/*
 * struct host1x_cdma_ops - HW-specific command DMA operations, dispatched
 * through the host1x_hw_cdma_*() wrappers below. The timeout_* hooks are
 * used for submit-timeout recovery together with @freeze and @resume.
 */
struct host1x_cdma_ops {
	void (*start)(struct host1x_cdma *cdma);
	void (*stop)(struct host1x_cdma *cdma);
	void (*flush)(struct host1x_cdma *cdma);
	int (*timeout_init)(struct host1x_cdma *cdma);
	void (*timeout_destroy)(struct host1x_cdma *cdma);
	void (*freeze)(struct host1x_cdma *cdma);
	void (*resume)(struct host1x_cdma *cdma, u32 getptr);
	void (*timeout_cpu_incr)(struct host1x_cdma *cdma, u32 getptr,
				 u32 syncpt_incrs, u32 syncval, u32 nr_slots);
};
struct host1x_pushbuffer_ops {
void ( * init ) ( struct push_buffer * pb ) ;
} ;
2013-03-22 16:34:01 +02:00
2013-03-22 16:34:04 +02:00
struct host1x_debug_ops {
void ( * debug_init ) ( struct dentry * de ) ;
void ( * show_channel_cdma ) ( struct host1x * host ,
struct host1x_channel * ch ,
struct output * o ) ;
void ( * show_channel_fifo ) ( struct host1x * host ,
struct host1x_channel * ch ,
struct output * o ) ;
void ( * show_mlocks ) ( struct host1x * host , struct output * output ) ;
} ;
2013-03-22 16:34:01 +02:00
/*
 * struct host1x_syncpt_ops - HW-specific syncpoint operations, dispatched
 * through the host1x_hw_syncpt_*() wrappers below.
 */
struct host1x_syncpt_ops {
	void (*restore)(struct host1x_syncpt *syncpt);
	void (*restore_wait_base)(struct host1x_syncpt *syncpt);
	void (*load_wait_base)(struct host1x_syncpt *syncpt);
	u32 (*load)(struct host1x_syncpt *syncpt);
	int (*cpu_incr)(struct host1x_syncpt *syncpt);
	void (*assign_to_channel)(struct host1x_syncpt *syncpt,
				  struct host1x_channel *channel);
	void (*enable_protection)(struct host1x *host);
};
2013-03-22 16:34:02 +02:00
/*
 * struct host1x_intr_ops - HW-specific syncpoint interrupt operations,
 * dispatched through the host1x_hw_intr_*() wrappers below.
 */
struct host1x_intr_ops {
	int (*init_host_sync)(struct host1x *host, u32 cpm,
		void (*syncpt_thresh_work)(struct work_struct *work));
	void (*set_syncpt_threshold)(
		struct host1x *host, unsigned int id, u32 thresh);
	void (*enable_syncpt_intr)(struct host1x *host, unsigned int id);
	void (*disable_syncpt_intr)(struct host1x *host, unsigned int id);
	void (*disable_all_syncpt_intrs)(struct host1x *host);
	int (*free_syncpt_irq)(struct host1x *host);
};
2019-02-01 14:28:22 +01:00
struct host1x_sid_entry {
unsigned int base ;
unsigned int offset ;
unsigned int limit ;
} ;
2013-03-22 16:34:01 +02:00
/*
 * struct host1x_info - per-SoC host1x configuration, selected at probe time.
 */
struct host1x_info {
	unsigned int nb_channels; /* host1x: number of channels supported */
	unsigned int nb_pts; /* host1x: number of syncpoints supported */
	unsigned int nb_bases; /* host1x: number of syncpoint bases supported */
	unsigned int nb_mlocks; /* host1x: number of mlocks supported */
	int (*init)(struct host1x *host1x); /* initialize per SoC ops */
	unsigned int sync_offset; /* offset of syncpoint registers */
	u64 dma_mask; /* mask of addressable memory */
	bool has_wide_gather; /* supports GATHER_W opcode */
	bool has_hypervisor; /* has hypervisor registers */
	unsigned int num_sid_entries; /* number of entries in @sid_table */
	const struct host1x_sid_entry *sid_table;
	/*
	 * On T20-T148, the boot chain may setup DC to increment syncpoints
	 * 26/27 on VBLANK. As such we cannot use these syncpoints until
	 * the display driver disables VBLANK increments.
	 */
	bool reserve_vblank_syncpts;
};
/*
 * struct host1x - main per-device state for the host1x controller.
 */
struct host1x {
	const struct host1x_info *info; /* SoC-specific configuration */

	void __iomem *regs;
	void __iomem *hv_regs; /* hypervisor region */
	struct host1x_syncpt *syncpt;
	struct host1x_syncpt_base *bases;
	struct device *dev;
	struct clk *clk;
	struct reset_control *rst;

	struct iommu_group *group;
	struct iommu_domain *domain;
	struct iova_domain iova;
	dma_addr_t iova_end;

	struct mutex intr_mutex;
	int intr_syncpt_irq;

	/* HW-specific operation tables, dispatched via the wrappers below */
	const struct host1x_syncpt_ops *syncpt_op;
	const struct host1x_intr_ops *intr_op;
	const struct host1x_channel_ops *channel_op;
	const struct host1x_cdma_ops *cdma_op;
	const struct host1x_pushbuffer_ops *cdma_pb_op;
	const struct host1x_debug_ops *debug_op;

	struct host1x_syncpt *nop_sp;

	struct mutex syncpt_mutex;

	struct host1x_channel_list channel_list;

	struct dentry *debugfs;

	struct mutex devices_lock; /* protects the devices list below */
	struct list_head devices;

	struct list_head list;

	struct device_dma_parameters dma_parms;
};
2017-09-05 11:43:05 +03:00
/* Register accessors for the hypervisor, sync and channel apertures. */
void host1x_hypervisor_writel(struct host1x *host1x, u32 r, u32 v);
u32 host1x_hypervisor_readl(struct host1x *host1x, u32 r);
void host1x_sync_writel(struct host1x *host1x, u32 r, u32 v);
u32 host1x_sync_readl(struct host1x *host1x, u32 r);
void host1x_ch_writel(struct host1x_channel *ch, u32 r, u32 v);
u32 host1x_ch_readl(struct host1x_channel *ch, u32 r);
2013-03-22 16:34:01 +02:00
/* Dispatch to the HW backend's syncpoint restore hook. */
static inline void host1x_hw_syncpt_restore(struct host1x *host,
					    struct host1x_syncpt *sp)
{
	host->syncpt_op->restore(sp);
}
/* Dispatch to the HW backend's wait-base restore hook. */
static inline void host1x_hw_syncpt_restore_wait_base(struct host1x *host,
						      struct host1x_syncpt *sp)
{
	host->syncpt_op->restore_wait_base(sp);
}
/* Dispatch to the HW backend's wait-base load hook. */
static inline void host1x_hw_syncpt_load_wait_base(struct host1x *host,
						   struct host1x_syncpt *sp)
{
	host->syncpt_op->load_wait_base(sp);
}
static inline u32 host1x_hw_syncpt_load ( struct host1x * host ,
struct host1x_syncpt * sp )
{
return host - > syncpt_op - > load ( sp ) ;
}
2013-05-29 13:26:08 +03:00
/* Increment syncpoint @sp from the CPU via the HW backend; returns its status. */
static inline int host1x_hw_syncpt_cpu_incr(struct host1x *host,
					    struct host1x_syncpt *sp)
{
	return host->syncpt_op->cpu_incr(sp);
}
2017-09-28 15:50:39 +03:00
static inline void host1x_hw_syncpt_assign_to_channel (
struct host1x * host , struct host1x_syncpt * sp ,
struct host1x_channel * ch )
{
return host - > syncpt_op - > assign_to_channel ( sp , ch ) ;
}
static inline void host1x_hw_syncpt_enable_protection ( struct host1x * host )
{
return host - > syncpt_op - > enable_protection ( host ) ;
}
2013-03-22 16:34:02 +02:00
/* Initialize host-side syncpoint interrupt handling via the HW backend. */
static inline int host1x_hw_intr_init_host_sync(struct host1x *host, u32 cpm,
		void (*syncpt_thresh_work)(struct work_struct *))
{
	const struct host1x_intr_ops *ops = host->intr_op;

	return ops->init_host_sync(host, cpm, syncpt_thresh_work);
}
/* Program the interrupt threshold for syncpoint @id via the HW backend. */
static inline void host1x_hw_intr_set_syncpt_threshold(struct host1x *host,
						       unsigned int id,
						       u32 thresh)
{
	host->intr_op->set_syncpt_threshold(host, id, thresh);
}
/* Enable the interrupt for syncpoint @id via the HW backend. */
static inline void host1x_hw_intr_enable_syncpt_intr(struct host1x *host,
						     unsigned int id)
{
	host->intr_op->enable_syncpt_intr(host, id);
}
/* Disable the interrupt for syncpoint @id via the HW backend. */
static inline void host1x_hw_intr_disable_syncpt_intr(struct host1x *host,
						      unsigned int id)
{
	host->intr_op->disable_syncpt_intr(host, id);
}
/* Disable all syncpoint interrupts via the HW backend. */
static inline void host1x_hw_intr_disable_all_syncpt_intrs(struct host1x *host)
{
	host->intr_op->disable_all_syncpt_intrs(host);
}
/* Release the syncpoint IRQ via the HW backend; returns its status. */
static inline int host1x_hw_intr_free_syncpt_irq(struct host1x *host)
{
	return host->intr_op->free_syncpt_irq(host);
}
2013-03-22 16:34:03 +02:00
/* Initialize @channel with ID @id through the HW backend. */
static inline int host1x_hw_channel_init(struct host1x *host,
					 struct host1x_channel *channel,
					 unsigned int id)
{
	return host->channel_op->init(channel, host, id);
}
/* Submit @job through the HW backend's channel submit hook. */
static inline int host1x_hw_channel_submit(struct host1x *host,
					   struct host1x_job *job)
{
	return host->channel_op->submit(job);
}
/* Start @cdma through the HW backend. */
static inline void host1x_hw_cdma_start(struct host1x *host,
					struct host1x_cdma *cdma)
{
	host->cdma_op->start(cdma);
}
/* Stop @cdma through the HW backend. */
static inline void host1x_hw_cdma_stop(struct host1x *host,
				       struct host1x_cdma *cdma)
{
	host->cdma_op->stop(cdma);
}
/* Flush @cdma through the HW backend. */
static inline void host1x_hw_cdma_flush(struct host1x *host,
					struct host1x_cdma *cdma)
{
	host->cdma_op->flush(cdma);
}
/* Set up timeout handling for @cdma via the HW backend; returns its status. */
static inline int host1x_hw_cdma_timeout_init(struct host1x *host,
					      struct host1x_cdma *cdma)
{
	return host->cdma_op->timeout_init(cdma);
}
/* Tear down timeout handling for @cdma via the HW backend. */
static inline void host1x_hw_cdma_timeout_destroy(struct host1x *host,
						  struct host1x_cdma *cdma)
{
	host->cdma_op->timeout_destroy(cdma);
}
/* Freeze @cdma via the HW backend (part of timeout recovery). */
static inline void host1x_hw_cdma_freeze(struct host1x *host,
					 struct host1x_cdma *cdma)
{
	host->cdma_op->freeze(cdma);
}
/* Resume @cdma at @getptr via the HW backend (part of timeout recovery). */
static inline void host1x_hw_cdma_resume(struct host1x *host,
					 struct host1x_cdma *cdma, u32 getptr)
{
	host->cdma_op->resume(cdma, getptr);
}
/* Dispatch the timeout CPU-increment hook to the HW backend. */
static inline void host1x_hw_cdma_timeout_cpu_incr(struct host1x *host,
						   struct host1x_cdma *cdma,
						   u32 getptr,
						   u32 syncpt_incrs,
						   u32 syncval, u32 nr_slots)
{
	const struct host1x_cdma_ops *ops = host->cdma_op;

	ops->timeout_cpu_incr(cdma, getptr, syncpt_incrs, syncval, nr_slots);
}
/* Initialize push buffer @pb via the HW backend. */
static inline void host1x_hw_pushbuffer_init(struct host1x *host,
					     struct push_buffer *pb)
{
	host->cdma_pb_op->init(pb);
}
2013-03-22 16:34:04 +02:00
static inline void host1x_hw_debug_init ( struct host1x * host , struct dentry * de )
{
if ( host - > debug_op & & host - > debug_op - > debug_init )
host - > debug_op - > debug_init ( de ) ;
}
/*
 * Dump @channel's CDMA state to @o via the HW backend.
 * Note: unlike host1x_hw_debug_init(), assumes debug_op is non-NULL.
 */
static inline void host1x_hw_show_channel_cdma(struct host1x *host,
					       struct host1x_channel *channel,
					       struct output *o)
{
	host->debug_op->show_channel_cdma(host, channel, o);
}
/*
 * Dump @channel's FIFO state to @o via the HW backend.
 * Note: unlike host1x_hw_debug_init(), assumes debug_op is non-NULL.
 */
static inline void host1x_hw_show_channel_fifo(struct host1x *host,
					       struct host1x_channel *channel,
					       struct output *o)
{
	host->debug_op->show_channel_fifo(host, channel, o);
}
/*
 * Dump mlock state to @o via the HW backend.
 * Note: unlike host1x_hw_debug_init(), assumes debug_op is non-NULL.
 */
static inline void host1x_hw_show_mlocks(struct host1x *host, struct output *o)
{
	host->debug_op->show_mlocks(host, o);
}
2013-09-02 09:48:53 +02:00
extern struct platform_driver tegra_mipi_driver ;
2013-03-22 16:34:01 +02:00
# endif