/* SPDX-License-Identifier: GPL-2.0-only OR MIT */
/* Copyright (c) 2023 Imagination Technologies Ltd. */

#ifndef PVR_CONTEXT_H
#define PVR_CONTEXT_H

#include <drm/gpu_scheduler.h>

#include <linux/compiler_attributes.h>
#include <linux/dma-fence.h>
#include <linux/kref.h>
#include <linux/types.h>
#include <linux/xarray.h>

#include <uapi/drm/pvr_drm.h>

#include "pvr_device.h"
#include "pvr_queue.h"

/* Forward declaration from pvr_gem.h. */
struct pvr_fw_object;

enum pvr_context_priority {
	PVR_CTX_PRIORITY_LOW = 0,
	PVR_CTX_PRIORITY_MEDIUM,
	PVR_CTX_PRIORITY_HIGH,
};
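
/*
 * Illustrative sketch (hypothetical helper, not part of the driver API): how
 * the UAPI priority could map onto the internal levels above, assuming the
 * DRM_PVR_CTX_PRIORITY_{LOW,NORMAL,HIGH} values declared in the included
 * uapi/drm/pvr_drm.h. The actual conversion is expected to live in
 * pvr_context.c; this only shows the intended correspondence.
 */
static __always_inline bool
pvr_example_priority_from_uapi(s32 uapi_priority, enum pvr_context_priority *out)
{
	switch (uapi_priority) {
	case DRM_PVR_CTX_PRIORITY_LOW:
		*out = PVR_CTX_PRIORITY_LOW;
		return true;
	case DRM_PVR_CTX_PRIORITY_NORMAL:
		*out = PVR_CTX_PRIORITY_MEDIUM;
		return true;
	case DRM_PVR_CTX_PRIORITY_HIGH:
		*out = PVR_CTX_PRIORITY_HIGH;
		return true;
	default:
		/* Any other value is assumed to be rejected at context creation. */
		return false;
	}
}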

/**
 * struct pvr_context - Context data
 */
struct pvr_context {
	/** @ref_count: Refcount for context. */
	struct kref ref_count;

	/** @pvr_dev: Pointer to owning device. */
	struct pvr_device *pvr_dev;

	/** @vm_ctx: Pointer to associated VM context. */
	struct pvr_vm_context *vm_ctx;

	/** @type: Type of context. */
	enum drm_pvr_ctx_type type;

	/** @flags: Context flags. */
	u32 flags;

	/** @priority: Context priority. */
	enum pvr_context_priority priority;

	/** @fw_obj: FW object representing FW-side context data. */
	struct pvr_fw_object *fw_obj;

	/** @data: Pointer to local copy of FW context data. */
	void *data;

	/** @data_size: Size of FW context data, in bytes. */
	u32 data_size;

	/** @ctx_id: FW context ID. */
	u32 ctx_id;

	/**
	 * @faulty: Set to 1 when the context queues had unfinished jobs when
	 * a GPU reset happened.
	 *
	 * In that case, the context is in an inconsistent state and can't be
	 * used anymore.
	 */
	atomic_t faulty;

	/** @queues: Union containing all kinds of queues. */
	union {
		struct {
			/** @geometry: Geometry queue. */
			struct pvr_queue *geometry;

			/** @fragment: Fragment queue. */
			struct pvr_queue *fragment;
		};

		/** @compute: Compute queue. */
		struct pvr_queue *compute;

		/** @transfer: Transfer queue. */
		struct pvr_queue *transfer;
	} queues;

	/** @file_link: pvr_file PVR context list link. */
	struct list_head file_link;
};

/*
 * Return the queue in @ctx that handles jobs of type @type, or NULL if this
 * context type doesn't support that job type.
 */
static __always_inline struct pvr_queue *
pvr_context_get_queue_for_job(struct pvr_context *ctx, enum drm_pvr_job_type type)
{
	switch (type) {
	case DRM_PVR_JOB_TYPE_GEOMETRY:
		return ctx->type == DRM_PVR_CTX_TYPE_RENDER ? ctx->queues.geometry : NULL;
	case DRM_PVR_JOB_TYPE_FRAGMENT:
		return ctx->type == DRM_PVR_CTX_TYPE_RENDER ? ctx->queues.fragment : NULL;
	case DRM_PVR_JOB_TYPE_COMPUTE:
		return ctx->type == DRM_PVR_CTX_TYPE_COMPUTE ? ctx->queues.compute : NULL;
	case DRM_PVR_JOB_TYPE_TRANSFER_FRAG:
		return ctx->type == DRM_PVR_CTX_TYPE_TRANSFER_FRAG ? ctx->queues.transfer : NULL;
	default:
		return NULL;
	}
}
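
/*
 * Illustrative sketch (hypothetical helper, not part of the driver): a job
 * submission path would typically treat a NULL return from
 * pvr_context_get_queue_for_job() as "this job type is not valid for this
 * context type" and reject the job.
 */
static __always_inline bool
pvr_example_job_type_matches_ctx(struct pvr_context *ctx, enum drm_pvr_job_type type)
{
	/* NULL covers, e.g., a compute job submitted to a render context. */
	return pvr_context_get_queue_for_job(ctx, type) != NULL;
}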

/**
 * pvr_context_get() - Take additional reference on context.
 * @ctx: Context pointer.
 *
 * Call pvr_context_put() to release.
 *
 * Returns:
 *  * The requested context on success, or
 *  * %NULL if no context pointer passed.
 */
static __always_inline struct pvr_context *
pvr_context_get(struct pvr_context *ctx)
{
	if (ctx)
		kref_get(&ctx->ref_count);

	return ctx;
}

/**
 * pvr_context_get_if_referenced() - Take an additional reference on a still
 * referenced context.
 * @ctx: Context pointer.
 *
 * Call pvr_context_put() to release.
 *
 * Returns:
 *  * True on success, or
 *  * false if no context pointer passed, or the context wasn't still
 *    referenced.
 */
static __always_inline bool
pvr_context_get_if_referenced(struct pvr_context *ctx)
{
	return ctx != NULL && kref_get_unless_zero(&ctx->ref_count) != 0;
}
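
/*
 * Illustrative sketch (hypothetical helper, not part of the driver): unlike
 * pvr_context_get(), pvr_context_get_if_referenced() is typically useful on
 * paths that hold a context pointer without owning a reference, where the
 * context may already be on its way to destruction.
 */
static __always_inline struct pvr_context *
pvr_example_try_get_ctx(struct pvr_context *ctx)
{
	/* Returns NULL once the last reference has already been dropped. */
	return pvr_context_get_if_referenced(ctx) ? ctx : NULL;
}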

/**
 * pvr_context_lookup() - Lookup context pointer from handle and file.
 * @pvr_file: Pointer to pvr_file structure.
 * @handle: Context handle.
 *
 * Takes reference on context. Call pvr_context_put() to release.
 *
 * Returns:
 *  * The requested context on success, or
 *  * %NULL on failure (context does not exist, or does not belong to @pvr_file).
 */
static __always_inline struct pvr_context *
pvr_context_lookup(struct pvr_file *pvr_file, u32 handle)
{
	struct pvr_context *ctx;

	/* Take the array lock to protect against context removal. */
	xa_lock(&pvr_file->ctx_handles);
	ctx = pvr_context_get(xa_load(&pvr_file->ctx_handles, handle));
	xa_unlock(&pvr_file->ctx_handles);

	return ctx;
}

/**
 * pvr_context_lookup_id() - Lookup context pointer from ID.
 * @pvr_dev: Device pointer.
 * @id: FW context ID.
 *
 * Takes reference on context. Call pvr_context_put() to release.
 *
 * Returns:
 *  * The requested context on success, or
 *  * %NULL on failure (context does not exist).
 */
static __always_inline struct pvr_context *
pvr_context_lookup_id(struct pvr_device *pvr_dev, u32 id)
{
	struct pvr_context *ctx;

	/* Take the array lock to protect against context removal. */
	xa_lock(&pvr_dev->ctx_ids);

	/* Contexts are removed from the ctx_ids set in the context release path,
	 * meaning the ref_count reached zero before they get removed. We need
	 * to make sure we're not trying to acquire a context that's being
	 * destroyed.
	 */
	ctx = xa_load(&pvr_dev->ctx_ids, id);
	if (!ctx || !kref_get_unless_zero(&ctx->ref_count))
		ctx = NULL;

	xa_unlock(&pvr_dev->ctx_ids);

	return ctx;
}

/* Get the FW address of the FW object backing @ctx. */
static __always_inline u32
pvr_context_get_fw_addr(struct pvr_context *ctx)
{
	u32 ctx_fw_addr = 0;

	pvr_fw_object_get_fw_addr(ctx->fw_obj, &ctx_fw_addr);

	return ctx_fw_addr;
}

void pvr_context_put(struct pvr_context *ctx);

int pvr_context_create(struct pvr_file *pvr_file, struct drm_pvr_ioctl_create_context_args *args);

int pvr_context_destroy(struct pvr_file *pvr_file, u32 handle);

void pvr_destroy_contexts_for_file(struct pvr_file *pvr_file);

void pvr_context_device_init(struct pvr_device *pvr_dev);

void pvr_context_device_fini(struct pvr_device *pvr_dev);
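
/*
 * Illustrative sketch (hypothetical helper, not part of the driver): a
 * typical ioctl-side flow resolves the context from its userspace handle,
 * then checks that the requested job type is valid for that context. On
 * success the caller owns the reference taken by pvr_context_lookup() and
 * must release it with pvr_context_put().
 */
static __always_inline struct pvr_context *
pvr_example_lookup_ctx_for_job(struct pvr_file *pvr_file, u32 handle,
			       enum drm_pvr_job_type type)
{
	struct pvr_context *ctx = pvr_context_lookup(pvr_file, handle);

	/* Drop the reference and fail if the context can't run @type jobs. */
	if (ctx && !pvr_context_get_queue_for_job(ctx, type)) {
		pvr_context_put(ctx);
		ctx = NULL;
	}

	return ctx;
}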

#endif /* PVR_CONTEXT_H */