/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2008-2018 Intel Corporation
 */

#ifndef _I915_GPU_ERROR_H_
#define _I915_GPU_ERROR_H_

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/ktime.h>
#include <linux/sched.h>

#include <drm/drm_mm.h>

#include "gt/intel_engine.h"
#include "gt/intel_gt_types.h"
#include "gt/uc/intel_uc_fw.h"

#include "intel_device_info.h"
#include "i915_gem.h"
#include "i915_gem_gtt.h"
#include "i915_params.h"
#include "i915_scheduler.h"

struct drm_i915_private;
struct i915_vma_compress;
struct intel_engine_capture_vma;
struct intel_overlay_error_state;

struct i915_vma_coredump {
	struct i915_vma_coredump *next;

	char name[20];

	u64 gtt_offset;
	u64 gtt_size;
	u32 gtt_page_sizes;

	int num_pages;
	int page_count;
	int unused;
	u32 *pages[];
};
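
/*
 * Illustration only (not driver code): walking one engine's VMA
 * captures. Each node carries page_count page-sized buffers in the
 * pages[] flexible array; first_vma and use_page() are hypothetical.
 *
 *	const struct i915_vma_coredump *vma;
 *	int i;
 *
 *	for (vma = first_vma; vma; vma = vma->next)
 *		for (i = 0; i < vma->page_count; i++)
 *			use_page(vma->name, vma->gtt_offset, vma->pages[i]);
 */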

struct i915_request_coredump {
	unsigned long flags;
	pid_t pid;
	u32 context;
	u32 seqno;
	u32 head;
	u32 tail;
	struct i915_sched_attr sched_attr;
};

struct intel_engine_coredump {
	const struct intel_engine_cs *engine;

	bool hung;
	bool simulated;
	u32 reset_count;

	/* position of active request inside the ring */
	u32 rq_head, rq_post, rq_tail;

	/* Register state */
	u32 ccid;
	u32 start;
	u32 tail;
	u32 head;
	u32 ctl;
	u32 mode;
	u32 hws;
	u32 ipeir;
	u32 ipehr;
	u32 esr;
	u32 bbstate;
	u32 instpm;
	u32 instps;
	u64 bbaddr;
	u64 acthd;
	u32 fault_reg;
	u64 faddr;
	u32 rc_psmi; /* sleep state */
	struct intel_instdone instdone;

	struct i915_gem_context_coredump {
		char comm[TASK_COMM_LEN];

		u64 total_runtime;
		u32 avg_runtime;

		pid_t pid;
		int active;
		int guilty;
		struct i915_sched_attr sched_attr;
	} context;

	struct i915_vma_coredump *vma;

	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
	unsigned int num_ports;

	struct {
		u32 gfx_mode;
		union {
			u64 pdp[4];
			u32 pp_dir_base;
		};
	} vm_info;

	struct intel_engine_coredump *next;
};

struct intel_gt_coredump {
	const struct intel_gt *_gt;
	bool awake;
	bool simulated;

	struct intel_gt_info info;

	/* Generic register state */
	u32 eir;
	u32 pgtbl_er;
	u32 ier;
	u32 gtier[6], ngtier;
	u32 derrmr;
	u32 forcewake;
	u32 error; /* gen6+ */
	u32 err_int; /* gen7 */
	u32 fault_data0; /* gen8, gen9 */
	u32 fault_data1; /* gen8, gen9 */
	u32 done_reg;
	u32 gac_eco;
	u32 gam_ecochk;
	u32 gab_ctl;
	u32 gfx_mode;
	u32 gtt_cache;
	u32 aux_err; /* gen12 */
	u32 sfc_done[GEN12_SFC_DONE_MAX]; /* gen12 */
	u32 gam_done; /* gen12 */

	u32 nfence;
	u64 fence[I915_MAX_NUM_FENCES];

	struct intel_engine_coredump *engine;

	struct intel_uc_coredump {
		struct intel_uc_fw guc_fw;
		struct intel_uc_fw huc_fw;
		struct i915_vma_coredump *guc_log;
	} *uc;

	struct intel_gt_coredump *next;
};

struct i915_gpu_coredump {
	struct kref ref;
	ktime_t time;
	ktime_t boottime;
	ktime_t uptime;
	unsigned long capture;

	struct drm_i915_private *i915;

	struct intel_gt_coredump *gt;

	char error_msg[128];
	bool simulated;
	bool wakelock;
	bool suspended;
	int iommu;
	u32 reset_count;
	u32 suspend_count;

	struct intel_device_info device_info;
	struct intel_runtime_info runtime_info;
	struct intel_driver_caps driver_caps;
	struct i915_params params;

	struct intel_overlay_error_state *overlay;

	struct scatterlist *sgl, *fit;
};
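
/*
 * The capture forms a singly-linked hierarchy: an i915_gpu_coredump
 * points at a chain of intel_gt_coredump nodes (->gt), each holding a
 * chain of intel_engine_coredump nodes (->engine), each holding a chain
 * of i915_vma_coredump nodes (->vma). A sketch of a full walk, with
 * visit() standing in for whatever a consumer wants to do per node:
 *
 *	for (gt = error->gt; gt; gt = gt->next)
 *		for (ee = gt->engine; ee; ee = ee->next)
 *			for (vma = ee->vma; vma; vma = vma->next)
 *				visit(gt, ee, vma);
 */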

struct i915_gpu_error {
	/* For reset and error_state handling. */
	spinlock_t lock;
	/* Protected by the above dev->gpu_error.lock. */
	struct i915_gpu_coredump *first_error;

	atomic_t pending_fb_pin;

	/** Number of times the device has been reset (global) */
	atomic_t reset_count;

	/** Number of times an engine has been reset */
	atomic_t reset_engine_count[I915_NUM_ENGINES];
};
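
/*
 * The reset counters are plain atomics and may be sampled locklessly,
 * e.g. (sketch, assuming the usual i915->gpu_error embedding):
 *
 *	u32 resets = atomic_read(&i915->gpu_error.reset_count);
 *	u32 engine_resets =
 *		atomic_read(&i915->gpu_error.reset_engine_count[id]);
 */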

struct drm_i915_error_state_buf {
	struct drm_i915_private *i915;
	struct scatterlist *sgl, *cur, *end;

	char *buf;
	size_t bytes;
	size_t size;
	loff_t iter;

	int err;
};

#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)

__printf(2, 3)
void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
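
/*
 * i915_error_printf() appends printf-formatted text to the error-state
 * buffer, e.g. (sketch):
 *
 *	i915_error_printf(e, "EIR: 0x%08x\n", gt->eir);
 */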

struct i915_gpu_coredump *i915_gpu_coredump(struct intel_gt *gt,
					    intel_engine_mask_t engine_mask);
void i915_capture_error_state(struct intel_gt *gt,
			      intel_engine_mask_t engine_mask);

struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);
struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp);
struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp);

struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp);

void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
				   struct intel_engine_capture_vma *capture,
				   struct i915_vma_compress *compress);

struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt);

void i915_vma_capture_finish(struct intel_gt_coredump *gt,
			     struct i915_vma_compress *compress);

void i915_error_state_store(struct i915_gpu_coredump *error);
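
/*
 * A minimal single-engine capture, sketched purely from the API above
 * (error handling, locking and multi-engine iteration elided; the
 * driver's i915_gpu_coredump() is the real implementation):
 *
 *	struct i915_gpu_coredump *error;
 *	struct intel_gt_coredump *gt_dump;
 *	struct intel_engine_coredump *ee;
 *	struct intel_engine_capture_vma *vmas;
 *	struct i915_vma_compress *compress;
 *
 *	error = i915_gpu_coredump_alloc(i915, GFP_KERNEL);
 *	gt_dump = intel_gt_coredump_alloc(gt, GFP_KERNEL);
 *	error->gt = gt_dump;
 *
 *	compress = i915_vma_capture_prepare(gt_dump);
 *	ee = intel_engine_coredump_alloc(engine, GFP_KERNEL);
 *	vmas = intel_engine_coredump_add_request(ee, rq, GFP_KERNEL);
 *	intel_engine_coredump_add_vma(ee, vmas, compress);
 *	gt_dump->engine = ee;
 *	i915_vma_capture_finish(gt_dump, compress);
 *
 *	i915_error_state_store(error);
 *	i915_gpu_coredump_put(error);
 */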

static inline struct i915_gpu_coredump *
i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
{
	kref_get(&gpu->ref);
	return gpu;
}

ssize_t
i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
				 char *buf, loff_t offset, size_t count);
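
/*
 * Consumers read the (potentially large) capture in chunks, e.g. from
 * a sysfs/debugfs read handler (sketch):
 *
 *	ssize_t ret = i915_gpu_coredump_copy_to_buffer(error, buf,
 *						       offset, count);
 */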

void __i915_gpu_coredump_free(struct kref *kref);
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
	if (gpu)
		kref_put(&gpu->ref, __i915_gpu_coredump_free);
}
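
/*
 * Lifetime follows the embedded kref: take a reference with
 * i915_gpu_coredump_get() and drop it with i915_gpu_coredump_put(),
 * which invokes __i915_gpu_coredump_free() on the last put (sketch):
 *
 *	struct i915_gpu_coredump *ref = i915_gpu_coredump_get(error);
 *	...
 *	i915_gpu_coredump_put(ref);
 */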

struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
void i915_reset_error_state(struct drm_i915_private *i915);
void i915_disable_error_state(struct drm_i915_private *i915, int err);
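
/*
 * i915_first_error_state() hands back the stored capture (an ERR_PTR
 * when capture is disabled, per the stubs below). A sketch of a
 * consumer, assuming the function returns its own reference:
 *
 *	error = i915_first_error_state(i915);
 *	if (!IS_ERR_OR_NULL(error)) {
 *		...
 *		i915_gpu_coredump_put(error);
 *	}
 */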

#else

static inline void
i915_capture_error_state(struct intel_gt *gt, intel_engine_mask_t engine_mask)
{
}

static inline struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp)
{
	return NULL;
}

static inline void
intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
			      struct intel_engine_capture_vma *capture,
			      struct i915_vma_compress *compress)
{
}

static inline struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt)
{
	return NULL;
}

static inline void
i915_vma_capture_finish(struct intel_gt_coredump *gt,
			struct i915_vma_compress *compress)
{
}

static inline void
i915_error_state_store(struct i915_gpu_coredump *error)
{
}

static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
}

static inline struct i915_gpu_coredump *
i915_first_error_state(struct drm_i915_private *i915)
{
	return ERR_PTR(-ENODEV);
}

static inline void i915_reset_error_state(struct drm_i915_private *i915)
{
}

static inline void i915_disable_error_state(struct drm_i915_private *i915,
					    int err)
{
}

#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */

#endif /* _I915_GPU_ERROR_H_ */