/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2008-2018 Intel Corporation
 */

#ifndef _I915_GPU_ERROR_H_
#define _I915_GPU_ERROR_H_

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/ktime.h>
#include <linux/sched.h>

#include <drm/drm_mm.h>

#include "gt/intel_engine.h"
#include "gt/intel_gt_types.h"
#include "gt/uc/intel_uc_fw.h"

#include "intel_device_info.h"

#include "i915_gem_gtt.h"
#include "i915_params.h"
#include "i915_scheduler.h"

struct drm_i915_private;
struct i915_vma_compress;
struct intel_engine_capture_vma;
struct intel_overlay_error_state;
struct i915_vma_coredump {
        struct i915_vma_coredump *next;

        struct list_head page_list;
};

struct i915_request_coredump {
        struct i915_sched_attr sched_attr;
};

struct __guc_capture_parsed_output;
struct intel_engine_coredump {
        const struct intel_engine_cs *engine;

        /* position of active request inside the ring */
        u32 rq_head, rq_post, rq_tail;

        u32 rc_psmi; /* sleep state */

        struct intel_instdone instdone;

        /* GuC matched capture-lists info */
        struct intel_guc_state_capture *capture;
        struct __guc_capture_parsed_output *guc_capture_node;

        struct i915_gem_context_coredump {
                char comm[TASK_COMM_LEN];

                struct i915_sched_attr sched_attr;
        } context;

        struct i915_vma_coredump *vma;

        struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
        unsigned int num_ports;

        struct intel_engine_coredump *next;
};
struct intel_gt_coredump {
        const struct intel_gt *_gt;

        struct intel_gt_info info;

        /* Generic register state */
        u32 gtier[6], ngtier;
        u32 error; /* gen6+ */
        u32 err_int; /* gen7 */
        u32 fault_data0; /* gen8, gen9 */
        u32 fault_data1; /* gen8, gen9 */
        u32 aux_err; /* gen12 */
        u32 gam_done; /* gen12 */

        /* Display related */
        u32 sfc_done[I915_MAX_SFC]; /* gen12 */

        u64 fence[I915_MAX_NUM_FENCES];

        struct intel_engine_coredump *engine;

        struct intel_uc_coredump {
                struct intel_uc_fw guc_fw;
                struct intel_uc_fw huc_fw;
                struct i915_vma_coredump *guc_log;
        } *uc;

        struct intel_gt_coredump *next;
};
struct i915_gpu_coredump {
        struct kref ref;

        unsigned long capture;

        struct drm_i915_private *i915;

        struct intel_gt_coredump *gt;

        struct intel_device_info device_info;
        struct intel_runtime_info runtime_info;
        struct intel_driver_caps driver_caps;
        struct i915_params params;

        struct intel_overlay_error_state *overlay;

        struct scatterlist *sgl, *fit;
};
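/*
 * The capture is a singly linked hierarchy: an i915_gpu_coredump holds a
 * chain of intel_gt_coredump (via ->next), each GT dump holds a chain of
 * intel_engine_coredump, and each engine dump chains the buffers it captured
 * through i915_vma_coredump. Illustrative walk only, not code taken from the
 * driver (locking and NULL checks elided, pr_info() stands in for a real
 * consumer):
 *
 *      for (gt = error->gt; gt; gt = gt->next)
 *              for (ee = gt->engine; ee; ee = ee->next)
 *                      for (vma = ee->vma; vma; vma = vma->next)
 *                              pr_info("%s: captured a buffer\n",
 *                                      ee->engine->name);
 */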
struct i915_gpu_error {
        /* For reset and error_state handling. */
        spinlock_t lock;
        /* Protected by the above dev->gpu_error.lock. */
        struct i915_gpu_coredump *first_error;

        atomic_t pending_fb_pin;

        /** Number of times the device has been reset (global) */
        atomic_t reset_count;

        /** Number of times an engine has been reset */
        atomic_t reset_engine_count[I915_NUM_ENGINES];
};
struct drm_i915_error_state_buf {
        struct drm_i915_private *i915;
        struct scatterlist *sgl, *cur, *end;
};
static inline u32 i915_reset_count(struct i915_gpu_error *error)
{
        return atomic_read(&error->reset_count);
}

static inline u32 i915_reset_engine_count(struct i915_gpu_error *error,
                                          const struct intel_engine_cs *engine)
{
        return atomic_read(&error->reset_engine_count[engine->uabi_class]);
}
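/*
 * Example (a sketch, not code lifted from the driver): because the counters
 * above only ever increment, a caller can detect that a reset happened while
 * it was working by sampling the relevant counter before and after. Here
 * "i915" is assumed to be the owning struct drm_i915_private and "engine"
 * the engine the work was submitted to.
 *
 *      u32 before = i915_reset_engine_count(&i915->gpu_error, engine);
 *
 *      ... submit requests and wait for them ...
 *
 *      if (i915_reset_engine_count(&i915->gpu_error, engine) != before)
 *              drm_dbg(&i915->drm, "engine was reset while we waited\n");
 */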
#define CORE_DUMP_FLAG_NONE             0x0
#define CORE_DUMP_FLAG_IS_GUC_CAPTURE   BIT(0)

#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
void intel_gpu_error_print_vma(struct drm_i915_error_state_buf *m,
                               const struct intel_engine_cs *engine,
                               const struct i915_vma_coredump *vma);
struct i915_vma_coredump *
intel_gpu_error_find_batch(const struct intel_engine_coredump *ee);
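/*
 * i915_error_printf() appends printf-style formatted text to the
 * scatterlist-backed drm_i915_error_state_buf used when the captured state
 * is turned into the text dump exposed to userspace. Hedged usage sketch
 * ("m" is assumed to be an already initialised buffer):
 *
 *      i915_error_printf(m, "GPU resets so far: %u\n",
 *                        i915_reset_count(&m->i915->gpu_error));
 */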
struct i915_gpu_coredump *i915_gpu_coredump(struct intel_gt *gt,
                                            intel_engine_mask_t engine_mask,
                                            u32 dump_flags);
void i915_capture_error_state(struct intel_gt *gt,
                              intel_engine_mask_t engine_mask, u32 dump_flags);
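/*
 * i915_capture_error_state() is the one-stop entry point: it snapshots the
 * requested engines and publishes the result as the first_error state that
 * userspace can read back. A minimal sketch of a caller (ALL_ENGINES is the
 * engine mask from the gt headers; the flags are the CORE_DUMP_FLAG_*
 * values defined above):
 *
 *      i915_capture_error_state(gt, ALL_ENGINES, CORE_DUMP_FLAG_NONE);
 */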
struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);

struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags);

struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags);
struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
                                  struct i915_request *rq,
                                  gfp_t gfp);
void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
                                   struct intel_engine_capture_vma *capture,
                                   struct i915_vma_compress *compress);

struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt);

void i915_vma_capture_finish(struct intel_gt_coredump *gt,
                             struct i915_vma_compress *compress);

void i915_error_state_store(struct i915_gpu_coredump *error);
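/*
 * The helpers above are the individual stages a full capture is built from.
 * A hedged outline of the flow (simplified: every allocation must be
 * checked, the real code runs from the reset path with ALLOW_FAIL-style gfp
 * masks rather than plain GFP_KERNEL, and linking the engine dump into
 * error->gt->engine is left out here):
 *
 *      error = i915_gpu_coredump_alloc(i915, GFP_KERNEL);
 *      error->gt = intel_gt_coredump_alloc(gt, GFP_KERNEL, dump_flags);
 *      ee = intel_engine_coredump_alloc(engine, GFP_KERNEL, dump_flags);
 *      capture = intel_engine_coredump_add_request(ee, rq, GFP_KERNEL);
 *
 *      compress = i915_vma_capture_prepare(error->gt);
 *      intel_engine_coredump_add_vma(ee, capture, compress);
 *      i915_vma_capture_finish(error->gt, compress);
 *
 *      i915_error_state_store(error);
 *      i915_gpu_coredump_put(error);
 */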
static inline struct i915_gpu_coredump *
i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
{
        kref_get(&gpu->ref);
        return gpu;
}

ssize_t
i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
                                 char *buf, loff_t offset, size_t count);
void __i915_gpu_coredump_free(struct kref *kref);
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
        if (gpu)
                kref_put(&gpu->ref, __i915_gpu_coredump_free);
}
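/*
 * Reading back a stored dump follows the usual kref pattern: take a
 * reference, stream the text out in chunks, drop the reference. Sketch only,
 * with error handling elided; "buf", "offset" and "count" are whatever the
 * caller's read interface hands in:
 *
 *      struct i915_gpu_coredump *error = i915_first_error_state(i915);
 *
 *      if (!IS_ERR_OR_NULL(error)) {
 *              ssize_t ret = i915_gpu_coredump_copy_to_buffer(error, buf,
 *                                                             offset, count);
 *              i915_gpu_coredump_put(error);
 *      }
 */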
struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
void i915_reset_error_state(struct drm_i915_private *i915);
void i915_disable_error_state(struct drm_i915_private *i915, int err);
#else

static inline void
i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...)
{
}

static inline void
i915_capture_error_state(struct intel_gt *gt, intel_engine_mask_t engine_mask, u32 dump_flags)
{
}
static inline struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
{
        return NULL;
}

static inline struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags)
{
        return NULL;
}

static inline struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags)
{
        return NULL;
}
static inline struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
                                  struct i915_request *rq,
                                  gfp_t gfp)
{
        return NULL;
}

static inline void
intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
                              struct intel_engine_capture_vma *capture,
                              struct i915_vma_compress *compress)
{
}
static inline struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt)
{
        return NULL;
}

static inline void
i915_vma_capture_finish(struct intel_gt_coredump *gt,
                        struct i915_vma_compress *compress)
{
}

static inline void
i915_error_state_store(struct i915_gpu_coredump *error)
{
}
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
}

static inline struct i915_gpu_coredump *
i915_first_error_state(struct drm_i915_private *i915)
{
        return ERR_PTR(-ENODEV);
}

static inline void i915_reset_error_state(struct drm_i915_private *i915)
{
}

static inline void i915_disable_error_state(struct drm_i915_private *i915,
                                            int err)
{
}
#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */

#endif /* _I915_GPU_ERROR_H_ */