/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2008-2018 Intel Corporation
 */
#ifndef _I915_GPU_ERROR_H_
#define _I915_GPU_ERROR_H_

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/ktime.h>
#include <linux/sched.h>

#include <drm/drm_mm.h>

#include "gt/intel_engine.h"
#include "gt/intel_gt_types.h"
#include "gt/uc/intel_uc_fw.h"

#include "intel_device_info.h"

#include "i915_gem_gtt.h"
#include "i915_params.h"
#include "i915_scheduler.h"

struct drm_i915_private;
struct i915_vma_compress;
struct intel_engine_capture_vma;
struct intel_overlay_error_state;
struct i915_vma_coredump {
	struct i915_vma_coredump *next;

	struct list_head page_list;
};

struct i915_request_coredump {
	struct i915_sched_attr sched_attr;
};
struct __guc_capture_parsed_output;

struct intel_engine_coredump {
	const struct intel_engine_cs *engine;

	/* position of active request inside the ring */
	u32 rq_head, rq_post, rq_tail;

	u32 rc_psmi; /* sleep state */
	struct intel_instdone instdone;

	/* GuC matched capture-lists info */
	struct intel_guc_state_capture *capture;
	struct __guc_capture_parsed_output *guc_capture_node;
	struct i915_gem_context_coredump {
		char comm[TASK_COMM_LEN];

		struct i915_sched_attr sched_attr;
	} context;

	struct i915_vma_coredump *vma;

	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
	unsigned int num_ports;

	struct intel_engine_coredump *next;
};
struct intel_ctb_coredump {
	u32 raw_head, head;
	u32 raw_tail, tail;
	u32 raw_status;
	u32 desc_offset;
	u32 cmds_offset;
	u32 size;
};
struct intel_gt_coredump {
	const struct intel_gt *_gt;

	struct intel_gt_info info;

	/* Generic register state */
	u32 gtier[6], ngtier;
	u32 error; /* gen6+ */
	u32 err_int; /* gen7 */
	u32 fault_data0; /* gen8, gen9 */
	u32 fault_data1; /* gen8, gen9 */
	u32 aux_err; /* gen12 */
	u32 gam_done; /* gen12 */

	/* Display related */
	u32 sfc_done[I915_MAX_SFC]; /* gen12 */

	u64 fence[I915_MAX_NUM_FENCES];

	struct intel_engine_coredump *engine;
	struct intel_uc_coredump {
		struct intel_uc_fw guc_fw;
		struct intel_uc_fw huc_fw;
		struct intel_ctb_coredump ctb[2];
		struct i915_vma_coredump *vma_ctb;
		struct i915_vma_coredump *vma_log;
	} *uc;

	struct intel_gt_coredump *next;
};
struct i915_gpu_coredump {
	struct kref ref;

	unsigned long capture;

	struct drm_i915_private *i915;

	struct intel_gt_coredump *gt;

	struct intel_device_info device_info;
	struct intel_runtime_info runtime_info;
	struct intel_driver_caps driver_caps;
	struct i915_params params;

	struct intel_overlay_error_state *overlay;

	struct scatterlist *sgl, *fit;
};
struct i915_gpu_error {
	/* For reset and error_state handling. */
	spinlock_t lock;
	/* Protected by the above dev->gpu_error.lock. */
	struct i915_gpu_coredump *first_error;

	atomic_t pending_fb_pin;

	/** Number of times the device has been reset (global) */
	atomic_t reset_count;

	/** Number of times an engine has been reset */
	atomic_t reset_engine_count[I915_NUM_ENGINES];
};
struct drm_i915_error_state_buf {
	struct drm_i915_private *i915;
	struct scatterlist *sgl, *cur, *end;
};
static inline u32 i915_reset_count(struct i915_gpu_error *error)
{
	return atomic_read(&error->reset_count);
}

static inline u32 i915_reset_engine_count(struct i915_gpu_error *error,
					  const struct intel_engine_cs *engine)
{
	return atomic_read(&error->reset_engine_count[engine->uabi_class]);
}
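
/*
 * Usage sketch (illustrative, not part of the API contract): callers can
 * snapshot the per-engine reset count before submitting work and compare it
 * afterwards to tell whether that engine was reset in between. The
 * &i915->gpu_error embedding is assumed from the dev->gpu_error comment
 * above.
 *
 *	u32 before = i915_reset_engine_count(&i915->gpu_error, engine);
 *	... queue the request and wait for it ...
 *	if (i915_reset_engine_count(&i915->gpu_error, engine) != before)
 *		... the engine was reset while the work was in flight ...
 */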
#define CORE_DUMP_FLAG_NONE           0x0
#define CORE_DUMP_FLAG_IS_GUC_CAPTURE BIT(0)

#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
void intel_gpu_error_print_vma(struct drm_i915_error_state_buf *m,
			       const struct intel_engine_cs *engine,
			       const struct i915_vma_coredump *vma);
struct i915_vma_coredump *
intel_gpu_error_find_batch(const struct intel_engine_coredump *ee);

struct i915_gpu_coredump *i915_gpu_coredump(struct intel_gt *gt,
					    intel_engine_mask_t engine_mask, u32 dump_flags);
void i915_capture_error_state(struct intel_gt *gt,
			      intel_engine_mask_t engine_mask, u32 dump_flags);

struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);

struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags);

struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags);
struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp);
void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
				   struct intel_engine_capture_vma *capture,
				   struct i915_vma_compress *compress);

struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt);

void i915_vma_capture_finish(struct intel_gt_coredump *gt,
			     struct i915_vma_compress *compress);

void i915_error_state_store(struct i915_gpu_coredump *error);
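
/*
 * Capture flow (sketch, inferred from the declarations above): a dump is
 * built with i915_gpu_coredump_alloc(), populated per GT and per engine via
 * intel_gt_coredump_alloc()/intel_engine_coredump_alloc(), filled with the
 * active request and its buffers through intel_engine_coredump_add_request()
 * and intel_engine_coredump_add_vma() (with i915_vma_capture_prepare()/
 * i915_vma_capture_finish() bracketing the buffer compression), and finally
 * published with i915_error_state_store(). i915_capture_error_state() is
 * assumed to wrap this whole sequence for the common case.
 */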
static inline struct i915_gpu_coredump *
i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
{
	kref_get(&gpu->ref);
	return gpu;
}

ssize_t
i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
				 char *buf, loff_t offset, size_t count);
void __i915_gpu_coredump_free(struct kref *kref);
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
	if (gpu)
		kref_put(&gpu->ref, __i915_gpu_coredump_free);
}
struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
void i915_reset_error_state(struct drm_i915_private *i915);
void i915_disable_error_state(struct drm_i915_private *i915, int err);
#else

static inline void
i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...)
{
}

static inline void
i915_capture_error_state(struct intel_gt *gt, intel_engine_mask_t engine_mask, u32 dump_flags)
{
}
static inline struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags)
{
	return NULL;
}

static inline struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags)
{
	return NULL;
}

static inline struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp)
{
	return NULL;
}
static inline void
intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
			      struct intel_engine_capture_vma *capture,
			      struct i915_vma_compress *compress)
{
}

static inline struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt)
{
	return NULL;
}

static inline void
i915_vma_capture_finish(struct intel_gt_coredump *gt,
			struct i915_vma_compress *compress)
{
}

static inline void
i915_error_state_store(struct i915_gpu_coredump *error)
{
}
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
}

static inline struct i915_gpu_coredump *
i915_first_error_state(struct drm_i915_private *i915)
{
	return ERR_PTR(-ENODEV);
}

static inline void i915_reset_error_state(struct drm_i915_private *i915)
{
}

static inline void i915_disable_error_state(struct drm_i915_private *i915,
					    int err)
{
}
#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */

#endif /* _I915_GPU_ERROR_H_ */