1 // SPDX-License-Identifier: GPL-2.0-or-later
3 #include <linux/dma-resv.h>
4 #include <linux/dma-fence-chain.h>
6 #include <drm/drm_atomic_state_helper.h>
7 #include <drm/drm_atomic_uapi.h>
8 #include <drm/drm_framebuffer.h>
9 #include <drm/drm_gem.h>
10 #include <drm/drm_gem_atomic_helper.h>
11 #include <drm/drm_gem_framebuffer_helper.h>
12 #include <drm/drm_simple_kms_helper.h>
14 #include "drm_internal.h"
19 * The GEM atomic helpers library implements generic atomic-commit
20 * functions for drivers that use GEM objects. Currently, it provides
21 * synchronization helpers, and plane state and framebuffer BO mappings
22 * for planes with shadow buffers.
24 * Before scanout, a plane's framebuffer needs to be synchronized with
25 * possible writers that draw into the framebuffer. All drivers should
26 * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
28 * the framebuffer so that the DRM core can synchronize access automatically.
29 * drm_gem_plane_helper_prepare_fb() can also be used directly as
30 * implementation of prepare_fb.
34 * #include <drm/drm_gem_atomic_helper.h>
36 * struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *	.prepare_fb = drm_gem_plane_helper_prepare_fb,
41 * A driver using a shadow buffer copies the content of the shadow buffers
42 * into the HW's framebuffer memory during an atomic update. This requires
43 * a mapping of the shadow buffer into kernel address space. The mappings
44 * cannot be established by commit-tail functions, such as atomic_update,
45 * as this would violate locking rules around dma_buf_vmap().
47 * The helpers for shadow-buffered planes establish and release mappings,
48 * and provide struct drm_shadow_plane_state, which stores the plane's mapping
49 * for commit-tail functions.
51 * Shadow-buffered planes can easily be enabled by using the provided macros
52 * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
53 * These macros set up the plane and plane-helper callbacks to point to the
54 * shadow-buffer helpers.
58 * #include <drm/drm_gem_atomic_helper.h>
60 * struct drm_plane_funcs driver_plane_funcs = {
62 * DRM_GEM_SHADOW_PLANE_FUNCS,
65 * struct drm_plane_helper_funcs driver_plane_helper_funcs = {
67 * DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
70 * In the driver's atomic-update function, shadow-buffer mappings are available
71 * from the plane state. Use to_drm_shadow_plane_state() to upcast from
72 * struct drm_plane_state.
76 * void driver_plane_atomic_update(struct drm_plane *plane,
77 * struct drm_plane_state *old_plane_state)
79 * struct drm_plane_state *plane_state = plane->state;
80 * struct drm_shadow_plane_state *shadow_plane_state =
81 * to_drm_shadow_plane_state(plane_state);
83 * // access shadow buffer via shadow_plane_state->map
86 * A mapping address for each of the framebuffer's buffer object is stored in
87 * struct &drm_shadow_plane_state.map. The mappings are valid while the state
90 * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
92 * callbacks. Access to shadow-buffer mappings is similar to regular
97 * struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
99 * DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
102 * void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
103 * struct drm_crtc_state *crtc_state,
104 * struct drm_plane_state *plane_state)
106 * struct drm_shadow_plane_state *shadow_plane_state =
107 * to_drm_shadow_plane_state(plane_state);
109 * // access shadow buffer via shadow_plane_state->map
118 * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
120 * @state: Plane state the fence will be attached to
122 * This function extracts the exclusive fence from &drm_gem_object.resv and
123 * attaches it to plane state for the atomic helper to wait on. This is
124 * necessary to correctly implement implicit synchronization for any buffers
125 * shared as a struct &dma_buf. This function can be used as the
126 * &drm_plane_helper_funcs.prepare_fb callback.
128 * There is no need for &drm_plane_helper_funcs.cleanup_fb hook for simple
129 * GEM based framebuffer drivers which have their buffers always pinned in
132 * This function is the default implementation for GEM drivers of
133 * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
135 int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane,
136 struct drm_plane_state *state)
138 struct dma_fence *fence = dma_fence_get(state->fence);
139 enum dma_resv_usage usage;
147 * Only add the kernel fences here if there is already a fence set via
148 * explicit fencing interfaces on the atomic ioctl.
150 * This way explicit fencing can be used to overrule implicit fencing,
151 * which is important to make explicit fencing use-cases work: One
152 * example is using one buffer for 2 screens with different refresh
153 * rates. Implicit fencing will clamp rendering to the refresh rate of
154 * the slower screen, whereas explicit fence allows 2 independent
155 * render and display loops on a single buffer. If a driver allows
156 * obeys both implicit and explicit fences for plane updates, then it
157 * will break all the benefits of explicit fencing.
159 usage = fence ? DMA_RESV_USAGE_KERNEL : DMA_RESV_USAGE_WRITE;
161 for (i = 0; i < state->fb->format->num_planes; ++i) {
162 struct drm_gem_object *obj = drm_gem_fb_get_obj(state->fb, i);
163 struct dma_fence *new;
170 ret = dma_resv_get_singleton(obj->resv, usage, &new);
175 struct dma_fence_chain *chain = dma_fence_chain_alloc();
182 dma_fence_chain_init(chain, fence, new, 1);
183 fence = &chain->base;
190 dma_fence_put(state->fence);
191 state->fence = fence;
195 dma_fence_put(fence);
198 EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);
201 * Shadow-buffered Planes
205 * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
207 * @new_shadow_plane_state: the new shadow-buffered plane state
209 * This function duplicates shadow-buffered plane state. This is helpful for drivers
210 * that subclass struct drm_shadow_plane_state.
212 * The function does not duplicate existing mappings of the shadow buffers.
213 * Mappings are maintained during the atomic commit by the plane's prepare_fb
214 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
215 * for corresponding helpers.
218 __drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
219 struct drm_shadow_plane_state *new_shadow_plane_state)
221 struct drm_plane_state *plane_state = plane->state;
222 struct drm_shadow_plane_state *shadow_plane_state =
223 to_drm_shadow_plane_state(plane_state);
225 __drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
227 drm_format_conv_state_copy(&new_shadow_plane_state->fmtcnv_state,
228 &shadow_plane_state->fmtcnv_state);
230 EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);
233 * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
236 * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
237 * shadow-buffered planes. It assumes the existing state to be of type
238 * struct drm_shadow_plane_state and it allocates the new state to be of this
241 * The function does not duplicate existing mappings of the shadow buffers.
242 * Mappings are maintained during the atomic commit by the plane's prepare_fb
243 * and cleanup_fb helpers. See drm_gem_prepare_shadow_fb() and drm_gem_cleanup_shadow_fb()
244 * for corresponding helpers.
247 * A pointer to a new plane state on success, or NULL otherwise.
249 struct drm_plane_state *
250 drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
252 struct drm_plane_state *plane_state = plane->state;
253 struct drm_shadow_plane_state *new_shadow_plane_state;
258 new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
259 if (!new_shadow_plane_state)
261 __drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);
263 return &new_shadow_plane_state->base;
265 EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);
268 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
269 * @shadow_plane_state: the shadow-buffered plane state
271 * This function cleans up shadow-buffered plane state. Helpful for drivers that
272 * subclass struct drm_shadow_plane_state.
274 void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
276 drm_format_conv_state_release(&shadow_plane_state->fmtcnv_state);
277 __drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
279 EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);
282 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
284 * @plane_state: the plane state of type struct drm_shadow_plane_state
286 * This function implements struct &drm_plane_funcs.atomic_destroy_state
287 * for shadow-buffered planes. It expects that mappings of shadow buffers
288 * have been released already.
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
					struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state =
		to_drm_shadow_plane_state(plane_state);

	__drm_gem_destroy_shadow_plane_state(shadow_plane_state);
	kfree(shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);
302 * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
304 * @shadow_plane_state: the shadow-buffered plane state
306 * This function resets state for shadow-buffered planes. Helpful
307 * for drivers that subclass struct drm_shadow_plane_state.
309 void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
310 struct drm_shadow_plane_state *shadow_plane_state)
312 __drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
313 drm_format_conv_state_init(&shadow_plane_state->fmtcnv_state);
315 EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);
318 * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
321 * This function implements struct &drm_plane_funcs.reset_plane for
322 * shadow-buffered planes. It assumes the current plane state to be
323 * of type struct drm_shadow_plane and it allocates the new state of
326 void drm_gem_reset_shadow_plane(struct drm_plane *plane)
328 struct drm_shadow_plane_state *shadow_plane_state;
331 drm_gem_destroy_shadow_plane_state(plane, plane->state);
332 plane->state = NULL; /* must be set to NULL here */
335 shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
336 if (!shadow_plane_state)
338 __drm_gem_reset_shadow_plane(plane, shadow_plane_state);
340 EXPORT_SYMBOL(drm_gem_reset_shadow_plane);
343 * drm_gem_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
345 * @plane_state: the plane state of type struct drm_shadow_plane_state
347 * This function implements struct &drm_plane_helper_funcs.begin_fb_access. It
348 * maps all buffer objects of the plane's framebuffer into kernel address
349 * space and stores them in struct &drm_shadow_plane_state.map. The first data
350 * bytes are available in struct &drm_shadow_plane_state.data.
352 * See drm_gem_end_shadow_fb_access() for cleanup.
355 * 0 on success, or a negative errno code otherwise.
357 int drm_gem_begin_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
359 struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
360 struct drm_framebuffer *fb = plane_state->fb;
365 return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
367 EXPORT_SYMBOL(drm_gem_begin_shadow_fb_access);
370 * drm_gem_end_shadow_fb_access - releases shadow framebuffers from CPU access
372 * @plane_state: the plane state of type struct drm_shadow_plane_state
374 * This function implements struct &drm_plane_helper_funcs.end_fb_access. It
375 * undoes all effects of drm_gem_begin_shadow_fb_access() in reverse order.
377 * See drm_gem_begin_shadow_fb_access() for more information.
379 void drm_gem_end_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
381 struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
382 struct drm_framebuffer *fb = plane_state->fb;
387 drm_gem_fb_vunmap(fb, shadow_plane_state->map);
389 EXPORT_SYMBOL(drm_gem_end_shadow_fb_access);
392 * drm_gem_simple_kms_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
393 * @pipe: the simple display pipe
394 * @plane_state: the plane state of type struct drm_shadow_plane_state
396 * This function implements struct drm_simple_display_funcs.begin_fb_access.
398 * See drm_gem_begin_shadow_fb_access() for details and
399 * drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
402 * 0 on success, or a negative errno code otherwise.
404 int drm_gem_simple_kms_begin_shadow_fb_access(struct drm_simple_display_pipe *pipe,
405 struct drm_plane_state *plane_state)
407 return drm_gem_begin_shadow_fb_access(&pipe->plane, plane_state);
409 EXPORT_SYMBOL(drm_gem_simple_kms_begin_shadow_fb_access);
412 * drm_gem_simple_kms_end_shadow_fb_access - releases shadow framebuffers from CPU access
413 * @pipe: the simple display pipe
414 * @plane_state: the plane state of type struct drm_shadow_plane_state
416 * This function implements struct drm_simple_display_funcs.end_fb_access.
417 * It undoes all effects of drm_gem_simple_kms_begin_shadow_fb_access() in
420 * See drm_gem_simple_kms_begin_shadow_fb_access().
422 void drm_gem_simple_kms_end_shadow_fb_access(struct drm_simple_display_pipe *pipe,
423 struct drm_plane_state *plane_state)
425 drm_gem_end_shadow_fb_access(&pipe->plane, plane_state);
427 EXPORT_SYMBOL(drm_gem_simple_kms_end_shadow_fb_access);
430 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
431 * @pipe: the simple display pipe
433 * This function implements struct drm_simple_display_funcs.reset_plane
434 * for shadow-buffered planes.
436 void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
438 drm_gem_reset_shadow_plane(&pipe->plane);
440 EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);
443 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
444 * @pipe: the simple display pipe
446 * This function implements struct drm_simple_display_funcs.duplicate_plane_state
447 * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
448 * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
449 * and cleanup_fb helpers.
452 * A pointer to a new plane state on success, or NULL otherwise.
454 struct drm_plane_state *
455 drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
457 return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
459 EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);
462 * drm_gem_simple_kms_destroy_shadow_plane_state - resets shadow-buffered plane state
463 * @pipe: the simple display pipe
464 * @plane_state: the plane state of type struct drm_shadow_plane_state
466 * This function implements struct drm_simple_display_funcs.destroy_plane_state
467 * for shadow-buffered planes. It expects that mappings of shadow buffers
468 * have been released already.
470 void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
471 struct drm_plane_state *plane_state)
473 drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
475 EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);