/*
 * Copyright 2013 Red Hat
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef VIRTGPU_DRM_H
#define VIRTGPU_DRM_H

#include "drm.h"

#if defined(__cplusplus)
/* The linkage string must be exactly "C" — embedded spaces are invalid. */
extern "C" {
#endif
/* Please note that modifications to all structs defined here are
 * subject to backwards-compatibility constraints.
 *
 * Do not use pointers, use __u64 instead for 32 bit / 64 bit user/kernel
 * compatibility. Keep fields aligned to their size.
 */
/* ioctl numbers, relative to DRM_COMMAND_BASE (see DRM_IOCTL_* below) */
#define DRM_VIRTGPU_MAP 0x01
#define DRM_VIRTGPU_EXECBUFFER 0x02
#define DRM_VIRTGPU_GETPARAM 0x03
#define DRM_VIRTGPU_RESOURCE_CREATE 0x04
#define DRM_VIRTGPU_RESOURCE_INFO 0x05
#define DRM_VIRTGPU_TRANSFER_FROM_HOST 0x06
#define DRM_VIRTGPU_TRANSFER_TO_HOST 0x07
#define DRM_VIRTGPU_WAIT 0x08
#define DRM_VIRTGPU_GET_CAPS 0x09
#define DRM_VIRTGPU_RESOURCE_CREATE_BLOB 0x0a
#define DRM_VIRTGPU_CONTEXT_INIT 0x0b
/* Flags for drm_virtgpu_execbuffer.flags */
#define VIRTGPU_EXECBUF_FENCE_FD_IN	0x01
#define VIRTGPU_EXECBUF_FENCE_FD_OUT	0x02
#define VIRTGPU_EXECBUF_RING_IDX	0x04
/* Mask of all currently valid execbuffer flags. */
#define VIRTGPU_EXECBUF_FLAGS  (\
		VIRTGPU_EXECBUF_FENCE_FD_IN |\
		VIRTGPU_EXECBUF_FENCE_FD_OUT |\
		VIRTGPU_EXECBUF_RING_IDX |\
		0)
struct drm_virtgpu_map {
2015-11-30 15:10:39 +01:00
__u64 offset ; /* use for mmap system call */
__u32 handle ;
__u32 pad ;
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_execbuffer {
2018-11-12 17:51:55 +01:00
__u32 flags ;
2015-11-30 15:10:39 +01:00
__u32 size ;
__u64 command ; /* void* */
__u64 bo_handles ;
__u32 num_bo_handles ;
2018-11-12 17:51:56 +01:00
__s32 fence_fd ; /* in/out fence fd (see VIRTGPU_EXECBUF_FENCE_FD_IN/OUT) */
2021-09-21 16:20:14 -07:00
__u32 ring_idx ; /* command ring index (see VIRTGPU_EXECBUF_RING_IDX) */
__u32 pad ;
2014-10-28 12:48:00 +01:00
} ;
/* Parameter ids for DRM_IOCTL_VIRTGPU_GETPARAM. */
#define VIRTGPU_PARAM_3D_FEATURES 1 /* do we have 3D features in the hw */
#define VIRTGPU_PARAM_CAPSET_QUERY_FIX 2 /* do we have the capset fix */
#define VIRTGPU_PARAM_RESOURCE_BLOB 3 /* DRM_VIRTGPU_RESOURCE_CREATE_BLOB */
#define VIRTGPU_PARAM_HOST_VISIBLE 4 /* Host blob resources are mappable */
#define VIRTGPU_PARAM_CROSS_DEVICE 5 /* Cross virtio-device resource sharing */
#define VIRTGPU_PARAM_CONTEXT_INIT 6 /* DRM_VIRTGPU_CONTEXT_INIT */
#define VIRTGPU_PARAM_SUPPORTED_CAPSET_IDs 7 /* Bitmask of supported capability set ids */
struct drm_virtgpu_getparam {
2015-11-30 15:10:39 +01:00
__u64 param ;
__u64 value ;
2014-10-28 12:48:00 +01:00
} ;
/* NO_BO flags? NO resource flag? */
/* resource flag for y_0_top */
struct drm_virtgpu_resource_create {
2015-11-30 15:10:39 +01:00
__u32 target ;
__u32 format ;
__u32 bind ;
__u32 width ;
__u32 height ;
__u32 depth ;
__u32 array_size ;
__u32 last_level ;
__u32 nr_samples ;
__u32 flags ;
__u32 bo_handle ; /* if this is set - recreate a new resource attached to this bo ? */
__u32 res_handle ; /* returned by kernel */
__u32 size ; /* validate transfer in the host */
__u32 stride ; /* validate transfer in the host */
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_resource_info {
2015-11-30 15:10:39 +01:00
__u32 bo_handle ;
__u32 res_handle ;
__u32 size ;
2020-09-23 17:32:00 -07:00
__u32 blob_mem ;
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_3d_box {
2015-11-30 15:10:39 +01:00
__u32 x ;
__u32 y ;
__u32 z ;
__u32 w ;
__u32 h ;
__u32 d ;
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_3d_transfer_to_host {
2015-11-30 15:10:39 +01:00
__u32 bo_handle ;
2014-10-28 12:48:00 +01:00
struct drm_virtgpu_3d_box box ;
2015-11-30 15:10:39 +01:00
__u32 level ;
__u32 offset ;
2020-09-23 17:32:00 -07:00
__u32 stride ;
__u32 layer_stride ;
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_3d_transfer_from_host {
2015-11-30 15:10:39 +01:00
__u32 bo_handle ;
2014-10-28 12:48:00 +01:00
struct drm_virtgpu_3d_box box ;
2015-11-30 15:10:39 +01:00
__u32 level ;
__u32 offset ;
2020-09-23 17:32:00 -07:00
__u32 stride ;
__u32 layer_stride ;
2014-10-28 12:48:00 +01:00
} ;
#define VIRTGPU_WAIT_NOWAIT 1 /* like it */
struct drm_virtgpu_3d_wait {
2015-11-30 15:10:39 +01:00
__u32 handle ; /* 0 is an invalid handle */
__u32 flags ;
2014-10-28 12:48:00 +01:00
} ;
struct drm_virtgpu_get_caps {
2015-11-30 15:10:39 +01:00
__u32 cap_set_id ;
__u32 cap_set_ver ;
__u64 addr ;
__u32 size ;
__u32 pad ;
2014-10-28 12:48:00 +01:00
} ;
2020-09-23 17:32:00 -07:00
/* Argument for DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB. */
struct drm_virtgpu_resource_create_blob {
#define VIRTGPU_BLOB_MEM_GUEST             0x0001
#define VIRTGPU_BLOB_MEM_HOST3D            0x0002
#define VIRTGPU_BLOB_MEM_HOST3D_GUEST      0x0003

#define VIRTGPU_BLOB_FLAG_USE_MAPPABLE     0x0001
#define VIRTGPU_BLOB_FLAG_USE_SHAREABLE    0x0002
#define VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE 0x0004
	/* zero is invalid blob_mem */
	__u32 blob_mem;
	__u32 blob_flags;
	__u32 bo_handle;
	__u32 res_handle;
	__u64 size;

	/*
	 * for 3D contexts with VIRTGPU_BLOB_MEM_HOST3D_GUEST and
	 * VIRTGPU_BLOB_MEM_HOST3D otherwise, must be zero.
	 */
	__u32 pad;
	__u32 cmd_size;
	__u64 cmd;
	__u64 blob_id;
};
/* Parameter ids for drm_virtgpu_context_set_param.param */
#define VIRTGPU_CONTEXT_PARAM_CAPSET_ID       0x0001
#define VIRTGPU_CONTEXT_PARAM_NUM_RINGS       0x0002
#define VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK 0x0003
/* A single context parameter (see VIRTGPU_CONTEXT_PARAM_*). */
struct drm_virtgpu_context_set_param {
	__u64 param;
	__u64 value;
};
/* Argument for DRM_IOCTL_VIRTGPU_CONTEXT_INIT. */
struct drm_virtgpu_context_init {
	__u32 num_params;
	__u32 pad;

	/* pointer to drm_virtgpu_context_set_param array */
	__u64 ctx_set_params;
};
/*
 * Event code that's given when VIRTGPU_CONTEXT_PARAM_POLL_RINGS_MASK is in
 * effect.  The event size is sizeof(drm_event), since there is no additional
 * payload.
 */
#define VIRTGPU_EVENT_FENCE_SIGNALED 0x90000000
/* Full ioctl request codes, built from the numbers above. */
#define DRM_IOCTL_VIRTGPU_MAP \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_MAP, struct drm_virtgpu_map)

#define DRM_IOCTL_VIRTGPU_EXECBUFFER \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_EXECBUFFER,\
		struct drm_virtgpu_execbuffer)

#define DRM_IOCTL_VIRTGPU_GETPARAM \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GETPARAM,\
		struct drm_virtgpu_getparam)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE			\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE,	\
		struct drm_virtgpu_resource_create)

#define DRM_IOCTL_VIRTGPU_RESOURCE_INFO \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_INFO, \
		 struct drm_virtgpu_resource_info)

#define DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_FROM_HOST,	\
		struct drm_virtgpu_3d_transfer_from_host)

#define DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_TRANSFER_TO_HOST,	\
		struct drm_virtgpu_3d_transfer_to_host)

#define DRM_IOCTL_VIRTGPU_WAIT				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_WAIT,	\
		struct drm_virtgpu_3d_wait)

#define DRM_IOCTL_VIRTGPU_GET_CAPS \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_GET_CAPS, \
	struct drm_virtgpu_get_caps)

#define DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB				\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_RESOURCE_CREATE_BLOB,	\
		struct drm_virtgpu_resource_create_blob)

#define DRM_IOCTL_VIRTGPU_CONTEXT_INIT					\
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VIRTGPU_CONTEXT_INIT,		\
		struct drm_virtgpu_context_init)
#if defined(__cplusplus)
}
#endif

#endif