2019-05-27 08:55:05 +02:00
/* SPDX-License-Identifier: GPL-2.0-or-later */
2005-04-16 15:20:36 -07:00
/*
2007-10-15 09:50:19 +02:00
* Copyright (c) by Jaroslav Kysela <perex@perex.cz>
2005-04-16 15:20:36 -07:00
* Takashi Iwai <tiwai@suse.de>
*
* Generic memory allocators
*/
# ifndef __SOUND_MEMALLOC_H
# define __SOUND_MEMALLOC_H
2021-06-10 13:09:35 +02:00
# include <asm/page.h>
2005-04-16 15:20:36 -07:00
struct device ;
2021-06-09 18:25:50 +02:00
struct vm_area_struct ;
2005-04-16 15:20:36 -07:00
/*
 * struct snd_dma_device - buffer device info: identifies how a DMA
 * buffer was allocated and which device owns it.
 */
struct snd_dma_device {
int type ; /* allocator type, one of the SNDRV_DMA_TYPE_XXX constants below */
struct device * dev ; /* generic device the buffer is bound to (may encode extra data for continuous allocations) */
} ;
2013-01-29 12:56:26 +01:00
/*
 * Pack an integer value into a fake struct device pointer; presumably
 * used as the "dev" argument for SNDRV_DMA_TYPE_CONTINUOUS allocations,
 * which have no real device — NOTE(review): confirm against callers.
 */
# define snd_dma_continuous_data(x) ((struct device *)(__force unsigned long)(x))
2005-04-16 15:20:36 -07:00
/*
* buffer types
*/
# define SNDRV_DMA_TYPE_UNKNOWN 0 /* not defined */
# define SNDRV_DMA_TYPE_CONTINUOUS 1 /* continuous no-DMA memory */
# define SNDRV_DMA_TYPE_DEV 2 /* generic device continuous */
2021-08-02 09:28:02 +02:00
# define SNDRV_DMA_TYPE_DEV_WC 5 /* continuous write-combined */
2008-06-17 16:39:06 +02:00
# ifdef CONFIG_SND_DMA_SGBUF
2005-04-16 15:20:36 -07:00
# define SNDRV_DMA_TYPE_DEV_SG 3 /* generic device SG-buffer */
2021-08-02 09:28:02 +02:00
# define SNDRV_DMA_TYPE_DEV_WC_SG 6 /* SG write-combined */
2008-06-17 16:39:06 +02:00
# else
# define SNDRV_DMA_TYPE_DEV_SG SNDRV_DMA_TYPE_DEV /* no SG-buf support */
2021-08-02 09:28:02 +02:00
# define SNDRV_DMA_TYPE_DEV_WC_SG SNDRV_DMA_TYPE_DEV_WC
2008-06-17 16:39:06 +02:00
# endif
2013-10-24 14:25:32 +02:00
# ifdef CONFIG_GENERIC_ALLOCATOR
2013-10-23 11:47:43 +08:00
# define SNDRV_DMA_TYPE_DEV_IRAM 4 /* generic device iram-buffer */
2013-10-24 14:25:32 +02:00
# else
# define SNDRV_DMA_TYPE_DEV_IRAM SNDRV_DMA_TYPE_DEV
# endif
2019-11-05 09:01:36 +01:00
# define SNDRV_DMA_TYPE_VMALLOC 7 /* vmalloc'ed buffer */
2005-04-16 15:20:36 -07:00
/*
 * struct snd_dma_buffer - info for buffer allocation; filled in by the
 * snd_dma_alloc_* functions and consumed by the accessors below.
 */
struct snd_dma_buffer {
struct snd_dma_device dev ; /* device type (allocator type + owning device) */
unsigned char * area ; /* virtual (CPU-side) pointer to the buffer */
dma_addr_t addr ; /* physical (DMA) address */
size_t bytes ; /* buffer size in bytes */
void * private_data ; /* private for allocator; don't touch */
} ;
2018-07-25 15:15:56 -05:00
/*
* return the pages matching with the given byte size
*/
static inline unsigned int snd_sgbuf_aligned_pages ( size_t size )
{
return ( size + PAGE_SIZE - 1 ) > > PAGE_SHIFT ;
}
2005-04-16 15:20:36 -07:00
/*
 * Allocate/release a buffer.
 *
 * snd_dma_alloc_pages() allocates @size bytes of the given SNDRV_DMA_TYPE_XXX
 * @type for @dev and fills in @dmab; the _fallback variant presumably retries
 * with smaller sizes when the full request cannot be satisfied — TODO confirm
 * in the implementation.  Buffers obtained here must be released with
 * snd_dma_free_pages().
 */
int snd_dma_alloc_pages ( int type , struct device * dev , size_t size ,
struct snd_dma_buffer * dmab ) ;
int snd_dma_alloc_pages_fallback ( int type , struct device * dev , size_t size ,
struct snd_dma_buffer * dmab ) ;
void snd_dma_free_pages ( struct snd_dma_buffer * dmab ) ;
2021-06-09 18:25:50 +02:00
/* mmap the buffer described by @dmab into the user VMA @area */
int snd_dma_buffer_mmap ( struct snd_dma_buffer * dmab ,
struct vm_area_struct * area ) ;
2005-04-16 15:20:36 -07:00
2021-06-09 18:25:49 +02:00
/*
 * Accessors resolving a byte @offset inside @dmab to the corresponding
 * DMA address, backing page, and contiguous chunk size (the latter is
 * clamped to @size) — these handle SG buffers where the backing memory
 * is not physically contiguous.
 */
dma_addr_t snd_sgbuf_get_addr ( struct snd_dma_buffer * dmab , size_t offset ) ;
struct page * snd_sgbuf_get_page ( struct snd_dma_buffer * dmab , size_t offset ) ;
unsigned int snd_sgbuf_get_chunk_size ( struct snd_dma_buffer * dmab ,
unsigned int ofs , unsigned int size ) ;
2021-07-15 09:58:23 +02:00
/*
 * Device-managed memory allocator: like snd_dma_alloc_pages() but the
 * buffer is released automatically with @dev (devres); returns NULL on
 * failure — NOTE(review): error convention not visible here, confirm
 * whether it is NULL or ERR_PTR.
 */
struct snd_dma_buffer * snd_devm_alloc_pages ( struct device * dev , int type ,
size_t size ) ;
2005-04-16 15:20:36 -07:00
# endif /* __SOUND_MEMALLOC_H */