// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.
#include <linux/cache.h>
#include <linux/dma-map-ops.h>
#include <linux/genalloc.h>
#include <linux/highmem.h>
#include <linux/io.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

#include <asm/cache.h>
/*
 * Apply @fn to the kernel-virtual mapping of the physical range
 * [paddr, paddr + size), one page-sized piece at a time.
 *
 * Lowmem pages are reached through the linear map (__va); highmem
 * pages are temporarily mapped with kmap_atomic() around the call.
 * After the first piece the intra-page offset drops to zero.
 */
static inline void cache_op(phys_addr_t paddr, size_t size,
			    void (*fn)(unsigned long start, unsigned long end))
{
	struct page *pg = phys_to_page(paddr);
	unsigned long off = offset_in_page(paddr);
	void *linear = __va(page_to_phys(pg));	/* lowmem cursor */
	size_t remaining = size;

	do {
		/* Clamp this piece to the end of the current page. */
		size_t chunk = PAGE_SIZE - off;

		if (chunk > remaining)
			chunk = remaining;

		if (PageHighMem(pg)) {
			void *kva = kmap_atomic(pg);

			fn((unsigned long)kva + off,
			   (unsigned long)kva + off + chunk);

			kunmap_atomic(kva);
		} else {
			fn((unsigned long)linear + off,
			   (unsigned long)linear + off + chunk);
		}

		/* Advance to the next physically contiguous page. */
		off = 0;
		pg++;
		linear += PAGE_SIZE;
		remaining -= chunk;
	} while (remaining);
}
2019-07-30 17:02:26 +08:00
static void dma_wbinv_set_zero_range ( unsigned long start , unsigned long end )
{
memset ( ( void * ) start , 0 , end - start ) ;
dma_wbinv_range ( start , end ) ;
}
/*
 * Prepare a page range for use as a coherent DMA buffer: clear it and
 * push the zeroed cache lines out with a write-back + invalidate.
 */
void arch_dma_prep_coherent(struct page *page, size_t size)
{
	phys_addr_t paddr = page_to_phys(page);

	cache_op(paddr, size, dma_wbinv_set_zero_range);
}
2019-11-07 18:03:11 +01:00
void arch_sync_dma_for_device ( phys_addr_t paddr , size_t size ,
enum dma_data_direction dir )
2018-09-05 14:25:12 +08:00
{
switch ( dir ) {
case DMA_TO_DEVICE :
cache_op ( paddr , size , dma_wb_range ) ;
break ;
case DMA_FROM_DEVICE :
case DMA_BIDIRECTIONAL :
cache_op ( paddr , size , dma_wbinv_range ) ;
break ;
default :
BUG ( ) ;
}
}
2019-11-07 18:03:11 +01:00
void arch_sync_dma_for_cpu ( phys_addr_t paddr , size_t size ,
enum dma_data_direction dir )
2018-09-05 14:25:12 +08:00
{
switch ( dir ) {
case DMA_TO_DEVICE :
2019-07-30 17:16:28 +08:00
return ;
2018-09-05 14:25:12 +08:00
case DMA_FROM_DEVICE :
case DMA_BIDIRECTIONAL :
2019-07-30 17:16:28 +08:00
cache_op ( paddr , size , dma_inv_range ) ;
2018-09-05 14:25:12 +08:00
break ;
default :
BUG ( ) ;
}
}