// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014-2018 Etnaviv Project
 */
# include <linux/platform_device.h>
# include <linux/sizes.h>
# include <linux/slab.h>
# include <linux/dma-mapping.h>
# include <linux/bitops.h>
# include "etnaviv_gpu.h"
# include "etnaviv_mmu.h"
# include "etnaviv_iommu.h"
# include "state_hi.xml.h"
# define PT_SIZE SZ_2M
# define PT_ENTRIES (PT_SIZE / sizeof(u32))
# define GPU_MEM_START 0x80000000
/*
 * IOMMUv1 address space state: the common etnaviv domain plus a single
 * flat page table (PT_SIZE bytes, one u32 PTE per 4K page) that the MC
 * walks directly.
 */
struct etnaviv_iommuv1_domain {
	struct etnaviv_iommu_domain base;	/* common domain (dev, base, size, ops) */
	u32 *pgtable_cpu;	/* CPU mapping of the page table */
	dma_addr_t pgtable_dma;	/* DMA address of the page table, written to the MC registers */
};
2017-09-07 17:06:28 +02:00
static struct etnaviv_iommuv1_domain *
to_etnaviv_domain ( struct etnaviv_iommu_domain * domain )
2015-12-03 18:21:29 +01:00
{
2017-09-07 17:06:28 +02:00
return container_of ( domain , struct etnaviv_iommuv1_domain , base ) ;
2015-12-03 18:21:29 +01:00
}
2017-09-07 17:06:28 +02:00
static int __etnaviv_iommu_init ( struct etnaviv_iommuv1_domain * etnaviv_domain )
2015-12-03 18:21:29 +01:00
{
u32 * p ;
2017-09-07 15:36:57 +02:00
int i ;
2015-12-03 18:21:29 +01:00
2018-04-17 12:00:46 +02:00
etnaviv_domain - > base . bad_page_cpu =
dma_alloc_wc ( etnaviv_domain - > base . dev , SZ_4K ,
& etnaviv_domain - > base . bad_page_dma ,
GFP_KERNEL ) ;
2017-09-07 17:06:28 +02:00
if ( ! etnaviv_domain - > base . bad_page_cpu )
2015-12-03 18:21:29 +01:00
return - ENOMEM ;
2017-09-07 17:06:28 +02:00
p = etnaviv_domain - > base . bad_page_cpu ;
2015-12-03 18:21:29 +01:00
for ( i = 0 ; i < SZ_4K / 4 ; i + + )
* p + + = 0xdead55aa ;
2018-04-17 12:00:46 +02:00
etnaviv_domain - > pgtable_cpu = dma_alloc_wc ( etnaviv_domain - > base . dev ,
PT_SIZE ,
& etnaviv_domain - > pgtable_dma ,
GFP_KERNEL ) ;
2017-09-07 17:06:28 +02:00
if ( ! etnaviv_domain - > pgtable_cpu ) {
2018-04-17 12:00:46 +02:00
dma_free_wc ( etnaviv_domain - > base . dev , SZ_4K ,
etnaviv_domain - > base . bad_page_cpu ,
etnaviv_domain - > base . bad_page_dma ) ;
2017-09-07 15:36:57 +02:00
return - ENOMEM ;
2015-12-03 18:21:29 +01:00
}
2017-11-27 15:33:28 +01:00
memset32 ( etnaviv_domain - > pgtable_cpu , etnaviv_domain - > base . bad_page_dma ,
PT_ENTRIES ) ;
2015-12-03 18:21:29 +01:00
return 0 ;
}
2017-09-07 17:06:28 +02:00
static void etnaviv_iommuv1_domain_free ( struct etnaviv_iommu_domain * domain )
2015-12-03 18:21:29 +01:00
{
2017-09-07 17:06:28 +02:00
struct etnaviv_iommuv1_domain * etnaviv_domain =
to_etnaviv_domain ( domain ) ;
2015-12-03 18:21:29 +01:00
2018-04-17 12:00:46 +02:00
dma_free_wc ( etnaviv_domain - > base . dev , PT_SIZE ,
etnaviv_domain - > pgtable_cpu , etnaviv_domain - > pgtable_dma ) ;
2015-12-03 18:21:29 +01:00
2018-04-17 12:00:46 +02:00
dma_free_wc ( etnaviv_domain - > base . dev , SZ_4K ,
etnaviv_domain - > base . bad_page_cpu ,
etnaviv_domain - > base . bad_page_dma ) ;
2015-12-03 18:21:29 +01:00
kfree ( etnaviv_domain ) ;
}
2017-09-07 17:06:28 +02:00
static int etnaviv_iommuv1_map ( struct etnaviv_iommu_domain * domain ,
unsigned long iova , phys_addr_t paddr ,
size_t size , int prot )
2015-12-03 18:21:29 +01:00
{
2017-09-07 17:06:28 +02:00
struct etnaviv_iommuv1_domain * etnaviv_domain = to_etnaviv_domain ( domain ) ;
2017-09-07 15:56:01 +02:00
unsigned int index = ( iova - GPU_MEM_START ) / SZ_4K ;
2015-12-03 18:21:29 +01:00
if ( size ! = SZ_4K )
return - EINVAL ;
2017-09-07 17:06:28 +02:00
etnaviv_domain - > pgtable_cpu [ index ] = paddr ;
2015-12-03 18:21:29 +01:00
return 0 ;
}
2017-09-07 17:06:28 +02:00
static size_t etnaviv_iommuv1_unmap ( struct etnaviv_iommu_domain * domain ,
2015-12-03 18:21:29 +01:00
unsigned long iova , size_t size )
{
2017-09-07 17:06:28 +02:00
struct etnaviv_iommuv1_domain * etnaviv_domain =
to_etnaviv_domain ( domain ) ;
2017-09-07 15:56:01 +02:00
unsigned int index = ( iova - GPU_MEM_START ) / SZ_4K ;
2015-12-03 18:21:29 +01:00
if ( size ! = SZ_4K )
return - EINVAL ;
2017-09-07 17:06:28 +02:00
etnaviv_domain - > pgtable_cpu [ index ] = etnaviv_domain - > base . bad_page_dma ;
2015-12-03 18:21:29 +01:00
return SZ_4K ;
}
/*
 * Size of the buffer etnaviv_iommuv1_dump() requires: the complete
 * page table.
 */
static size_t etnaviv_iommuv1_dump_size(struct etnaviv_iommu_domain *domain)
{
	return PT_SIZE;
}
/*
 * Copy the whole page table into @buf for a devcoredump; @buf must be
 * at least etnaviv_iommuv1_dump_size() bytes.
 */
static void etnaviv_iommuv1_dump(struct etnaviv_iommu_domain *domain, void *buf)
{
	struct etnaviv_iommuv1_domain *etnaviv_domain =
			to_etnaviv_domain(domain);

	memcpy(buf, etnaviv_domain->pgtable_cpu, PT_SIZE);
}
/*
 * Reprogram the MC after a GPU reset: restore the linear-window base
 * address and the page table address for every client (FE, TX, PE,
 * PEZ, RA). MMIO writes only; must run with the GPU idle — presumably
 * called from the reset/resume path (TODO confirm against callers).
 */
void etnaviv_iommuv1_restore(struct etnaviv_gpu *gpu)
{
	struct etnaviv_iommuv1_domain *etnaviv_domain =
			to_etnaviv_domain(gpu->mmu->domain);
	u32 pgtable;

	/* set base addresses */
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_RA, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_FE, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_TX, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PEZ, gpu->memory_base);
	gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PE, gpu->memory_base);

	/* set page table address in MC */
	pgtable = (u32)etnaviv_domain->pgtable_dma;

	gpu_write(gpu, VIVS_MC_MMU_FE_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_TX_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_PE_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_PEZ_PAGE_TABLE, pgtable);
	gpu_write(gpu, VIVS_MC_MMU_RA_PAGE_TABLE, pgtable);
}
/* Domain operations for the IOMMUv1 flat page table. */
static const struct etnaviv_iommu_domain_ops etnaviv_iommuv1_ops = {
	.free = etnaviv_iommuv1_domain_free,
	.map = etnaviv_iommuv1_map,
	.unmap = etnaviv_iommuv1_unmap,
	.dump_size = etnaviv_iommuv1_dump_size,
	.dump = etnaviv_iommuv1_dump,
};
struct etnaviv_iommu_domain *
etnaviv_iommuv1_domain_alloc ( struct etnaviv_gpu * gpu )
2015-12-03 18:21:29 +01:00
{
2017-09-07 17:06:28 +02:00
struct etnaviv_iommuv1_domain * etnaviv_domain ;
struct etnaviv_iommu_domain * domain ;
2015-12-03 18:21:29 +01:00
int ret ;
etnaviv_domain = kzalloc ( sizeof ( * etnaviv_domain ) , GFP_KERNEL ) ;
if ( ! etnaviv_domain )
return NULL ;
2017-09-07 17:06:28 +02:00
domain = & etnaviv_domain - > base ;
2015-12-03 18:21:29 +01:00
2017-09-07 17:06:28 +02:00
domain - > dev = gpu - > dev ;
domain - > base = GPU_MEM_START ;
domain - > size = PT_ENTRIES * SZ_4K ;
domain - > ops = & etnaviv_iommuv1_ops ;
2015-12-03 18:21:29 +01:00
ret = __etnaviv_iommu_init ( etnaviv_domain ) ;
if ( ret )
goto out_free ;
2017-09-07 17:06:28 +02:00
return & etnaviv_domain - > base ;
2015-12-03 18:21:29 +01:00
out_free :
kfree ( etnaviv_domain ) ;
return NULL ;
}