// SPDX-License-Identifier: GPL-2.0
/*
 * ZynqMP DisplayPort Subsystem - KMS API
 *
 * Copyright (C) 2017 - 2021 Xilinx, Inc.
 */
12 #include <drm/clients/drm_client_setup.h>
13 #include <drm/drm_atomic.h>
14 #include <drm/drm_atomic_helper.h>
15 #include <drm/drm_blend.h>
16 #include <drm/drm_bridge.h>
17 #include <drm/drm_bridge_connector.h>
18 #include <drm/drm_connector.h>
19 #include <drm/drm_crtc.h>
20 #include <drm/drm_device.h>
21 #include <drm/drm_drv.h>
22 #include <drm/drm_encoder.h>
23 #include <drm/drm_fbdev_dma.h>
24 #include <drm/drm_fourcc.h>
25 #include <drm/drm_framebuffer.h>
26 #include <drm/drm_gem_dma_helper.h>
27 #include <drm/drm_gem_framebuffer_helper.h>
28 #include <drm/drm_managed.h>
29 #include <drm/drm_mode_config.h>
30 #include <drm/drm_plane.h>
31 #include <drm/drm_probe_helper.h>
32 #include <drm/drm_simple_kms_helper.h>
33 #include <drm/drm_vblank.h>
35 #include <linux/clk.h>
36 #include <linux/delay.h>
37 #include <linux/pm_runtime.h>
38 #include <linux/spinlock.h>
40 #include "zynqmp_disp.h"
41 #include "zynqmp_dp.h"
42 #include "zynqmp_dpsub.h"
43 #include "zynqmp_kms.h"
45 static inline struct zynqmp_dpsub *to_zynqmp_dpsub(struct drm_device *drm)
47 return container_of(drm, struct zynqmp_dpsub_drm, dev)->dpsub;
/* -----------------------------------------------------------------------------
 * DRM Planes
 */
54 static int zynqmp_dpsub_plane_atomic_check(struct drm_plane *plane,
55 struct drm_atomic_state *state)
57 struct drm_plane_state *new_plane_state = drm_atomic_get_new_plane_state(state,
59 struct drm_crtc_state *crtc_state;
61 if (!new_plane_state->crtc)
64 crtc_state = drm_atomic_get_crtc_state(state, new_plane_state->crtc);
65 if (IS_ERR(crtc_state))
66 return PTR_ERR(crtc_state);
68 return drm_atomic_helper_check_plane_state(new_plane_state,
75 static void zynqmp_dpsub_plane_atomic_disable(struct drm_plane *plane,
76 struct drm_atomic_state *state)
78 struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
80 struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
81 struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
86 zynqmp_disp_layer_disable(layer);
88 if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
89 zynqmp_disp_blend_set_global_alpha(dpsub->disp, false,
90 plane->state->alpha >> 8);
93 static void zynqmp_dpsub_plane_atomic_update(struct drm_plane *plane,
94 struct drm_atomic_state *state)
96 struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state, plane);
97 struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state, plane);
98 struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(plane->dev);
99 struct zynqmp_disp_layer *layer = dpsub->layers[plane->index];
100 bool format_changed = false;
102 if (!old_state->fb ||
103 old_state->fb->format->format != new_state->fb->format->format)
104 format_changed = true;
107 * If the format has changed (including going from a previously
108 * disabled state to any format), reconfigure the format. Disable the
109 * plane first if needed.
111 if (format_changed) {
113 zynqmp_disp_layer_disable(layer);
115 zynqmp_disp_layer_set_format(layer, new_state->fb->format);
118 zynqmp_disp_layer_update(layer, new_state);
120 if (plane->index == ZYNQMP_DPSUB_LAYER_GFX)
121 zynqmp_disp_blend_set_global_alpha(dpsub->disp, true,
122 plane->state->alpha >> 8);
125 * Unconditionally enable the layer, as it may have been disabled
126 * previously either explicitly to reconfigure layer format, or
127 * implicitly after DPSUB reset during display mode change. DRM
128 * framework calls this callback for enabled planes only.
130 zynqmp_disp_layer_enable(layer);
133 static const struct drm_plane_helper_funcs zynqmp_dpsub_plane_helper_funcs = {
134 .atomic_check = zynqmp_dpsub_plane_atomic_check,
135 .atomic_update = zynqmp_dpsub_plane_atomic_update,
136 .atomic_disable = zynqmp_dpsub_plane_atomic_disable,
139 static const struct drm_plane_funcs zynqmp_dpsub_plane_funcs = {
140 .update_plane = drm_atomic_helper_update_plane,
141 .disable_plane = drm_atomic_helper_disable_plane,
142 .destroy = drm_plane_cleanup,
143 .reset = drm_atomic_helper_plane_reset,
144 .atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
145 .atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
148 static int zynqmp_dpsub_create_planes(struct zynqmp_dpsub *dpsub)
153 for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++) {
154 struct zynqmp_disp_layer *layer = dpsub->layers[i];
155 struct drm_plane *plane = &dpsub->drm->planes[i];
156 enum drm_plane_type type;
157 unsigned int num_formats;
160 formats = zynqmp_disp_layer_drm_formats(layer, &num_formats);
164 /* Graphics layer is primary, and video layer is overlay. */
165 type = i == ZYNQMP_DPSUB_LAYER_VID
166 ? DRM_PLANE_TYPE_OVERLAY : DRM_PLANE_TYPE_PRIMARY;
167 ret = drm_universal_plane_init(&dpsub->drm->dev, plane, 0,
168 &zynqmp_dpsub_plane_funcs,
169 formats, num_formats,
175 drm_plane_helper_add(plane, &zynqmp_dpsub_plane_helper_funcs);
177 drm_plane_create_zpos_immutable_property(plane, i);
178 if (i == ZYNQMP_DPSUB_LAYER_GFX)
179 drm_plane_create_alpha_property(plane);
/* -----------------------------------------------------------------------------
 * DRM CRTC
 */
189 static inline struct zynqmp_dpsub *crtc_to_dpsub(struct drm_crtc *crtc)
191 return container_of(crtc, struct zynqmp_dpsub_drm, crtc)->dpsub;
194 static void zynqmp_dpsub_crtc_atomic_enable(struct drm_crtc *crtc,
195 struct drm_atomic_state *state)
197 struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
198 struct drm_display_mode *adjusted_mode = &crtc->state->adjusted_mode;
201 pm_runtime_get_sync(dpsub->dev);
203 zynqmp_disp_setup_clock(dpsub->disp, adjusted_mode->clock * 1000);
205 ret = clk_prepare_enable(dpsub->vid_clk);
207 dev_err(dpsub->dev, "failed to enable a pixel clock\n");
208 pm_runtime_put_sync(dpsub->dev);
212 zynqmp_disp_enable(dpsub->disp);
214 /* Delay of 3 vblank intervals for timing gen to be stable */
215 vrefresh = (adjusted_mode->clock * 1000) /
216 (adjusted_mode->vtotal * adjusted_mode->htotal);
217 msleep(3 * 1000 / vrefresh);
220 static void zynqmp_dpsub_crtc_atomic_disable(struct drm_crtc *crtc,
221 struct drm_atomic_state *state)
223 struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
224 struct drm_plane_state *old_plane_state;
227 * Disable the plane if active. The old plane state can be NULL in the
228 * .shutdown() path if the plane is already disabled, skip
229 * zynqmp_disp_plane_atomic_disable() in that case.
231 old_plane_state = drm_atomic_get_old_plane_state(state, crtc->primary);
233 zynqmp_dpsub_plane_atomic_disable(crtc->primary, state);
235 zynqmp_disp_disable(dpsub->disp);
237 drm_crtc_vblank_off(crtc);
239 spin_lock_irq(&crtc->dev->event_lock);
240 if (crtc->state->event) {
241 drm_crtc_send_vblank_event(crtc, crtc->state->event);
242 crtc->state->event = NULL;
244 spin_unlock_irq(&crtc->dev->event_lock);
246 clk_disable_unprepare(dpsub->vid_clk);
247 pm_runtime_put_sync(dpsub->dev);
/* Pull all planes on this CRTC into the commit so they update atomically. */
static int zynqmp_dpsub_crtc_atomic_check(struct drm_crtc *crtc,
					  struct drm_atomic_state *state)
{
	return drm_atomic_add_affected_planes(state, crtc);
}
/* Re-arm vblank interrupt reporting at the start of each commit. */
static void zynqmp_dpsub_crtc_atomic_begin(struct drm_crtc *crtc,
					   struct drm_atomic_state *state)
{
	drm_crtc_vblank_on(crtc);
}
262 static void zynqmp_dpsub_crtc_atomic_flush(struct drm_crtc *crtc,
263 struct drm_atomic_state *state)
265 if (crtc->state->event) {
266 struct drm_pending_vblank_event *event;
268 /* Consume the flip_done event from atomic helper. */
269 event = crtc->state->event;
270 crtc->state->event = NULL;
272 event->pipe = drm_crtc_index(crtc);
274 WARN_ON(drm_crtc_vblank_get(crtc) != 0);
276 spin_lock_irq(&crtc->dev->event_lock);
277 drm_crtc_arm_vblank_event(crtc, event);
278 spin_unlock_irq(&crtc->dev->event_lock);
282 static const struct drm_crtc_helper_funcs zynqmp_dpsub_crtc_helper_funcs = {
283 .atomic_enable = zynqmp_dpsub_crtc_atomic_enable,
284 .atomic_disable = zynqmp_dpsub_crtc_atomic_disable,
285 .atomic_check = zynqmp_dpsub_crtc_atomic_check,
286 .atomic_begin = zynqmp_dpsub_crtc_atomic_begin,
287 .atomic_flush = zynqmp_dpsub_crtc_atomic_flush,
290 static int zynqmp_dpsub_crtc_enable_vblank(struct drm_crtc *crtc)
292 struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
294 zynqmp_dp_enable_vblank(dpsub->dp);
299 static void zynqmp_dpsub_crtc_disable_vblank(struct drm_crtc *crtc)
301 struct zynqmp_dpsub *dpsub = crtc_to_dpsub(crtc);
303 zynqmp_dp_disable_vblank(dpsub->dp);
306 static const struct drm_crtc_funcs zynqmp_dpsub_crtc_funcs = {
307 .destroy = drm_crtc_cleanup,
308 .set_config = drm_atomic_helper_set_config,
309 .page_flip = drm_atomic_helper_page_flip,
310 .reset = drm_atomic_helper_crtc_reset,
311 .atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
312 .atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
313 .enable_vblank = zynqmp_dpsub_crtc_enable_vblank,
314 .disable_vblank = zynqmp_dpsub_crtc_disable_vblank,
317 static int zynqmp_dpsub_create_crtc(struct zynqmp_dpsub *dpsub)
319 struct drm_plane *plane = &dpsub->drm->planes[ZYNQMP_DPSUB_LAYER_GFX];
320 struct drm_crtc *crtc = &dpsub->drm->crtc;
323 ret = drm_crtc_init_with_planes(&dpsub->drm->dev, crtc, plane,
324 NULL, &zynqmp_dpsub_crtc_funcs, NULL);
328 drm_crtc_helper_add(crtc, &zynqmp_dpsub_crtc_helper_funcs);
330 /* Start with vertical blanking interrupt reporting disabled. */
331 drm_crtc_vblank_off(crtc);
336 static void zynqmp_dpsub_map_crtc_to_plane(struct zynqmp_dpsub *dpsub)
338 u32 possible_crtcs = drm_crtc_mask(&dpsub->drm->crtc);
341 for (i = 0; i < ARRAY_SIZE(dpsub->drm->planes); i++)
342 dpsub->drm->planes[i].possible_crtcs = possible_crtcs;
346 * zynqmp_dpsub_drm_handle_vblank - Handle the vblank event
347 * @dpsub: DisplayPort subsystem
349 * This function handles the vblank interrupt, and sends an event to
350 * CRTC object. This will be called by the DP vblank interrupt handler.
352 void zynqmp_dpsub_drm_handle_vblank(struct zynqmp_dpsub *dpsub)
354 drm_crtc_handle_vblank(&dpsub->drm->crtc);
/* -----------------------------------------------------------------------------
 * Dumb Buffer & Framebuffer Allocation
 */
361 static int zynqmp_dpsub_dumb_create(struct drm_file *file_priv,
362 struct drm_device *drm,
363 struct drm_mode_create_dumb *args)
365 struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
366 unsigned int pitch = DIV_ROUND_UP(args->width * args->bpp, 8);
368 /* Enforce the alignment constraints of the DMA engine. */
369 args->pitch = ALIGN(pitch, dpsub->dma_align);
371 return drm_gem_dma_dumb_create_internal(file_priv, drm, args);
374 static struct drm_framebuffer *
375 zynqmp_dpsub_fb_create(struct drm_device *drm, struct drm_file *file_priv,
376 const struct drm_mode_fb_cmd2 *mode_cmd)
378 struct zynqmp_dpsub *dpsub = to_zynqmp_dpsub(drm);
379 struct drm_mode_fb_cmd2 cmd = *mode_cmd;
382 /* Enforce the alignment constraints of the DMA engine. */
383 for (i = 0; i < ARRAY_SIZE(cmd.pitches); ++i)
384 cmd.pitches[i] = ALIGN(cmd.pitches[i], dpsub->dma_align);
386 return drm_gem_fb_create(drm, file_priv, &cmd);
389 static const struct drm_mode_config_funcs zynqmp_dpsub_mode_config_funcs = {
390 .fb_create = zynqmp_dpsub_fb_create,
391 .atomic_check = drm_atomic_helper_check,
392 .atomic_commit = drm_atomic_helper_commit,
/* -----------------------------------------------------------------------------
 * DRM/KMS Driver
 */
399 DEFINE_DRM_GEM_DMA_FOPS(zynqmp_dpsub_drm_fops);
401 static const struct drm_driver zynqmp_dpsub_drm_driver = {
402 .driver_features = DRIVER_MODESET | DRIVER_GEM |
405 DRM_GEM_DMA_DRIVER_OPS_WITH_DUMB_CREATE(zynqmp_dpsub_dumb_create),
406 DRM_FBDEV_DMA_DRIVER_OPS,
408 .fops = &zynqmp_dpsub_drm_fops,
410 .name = "zynqmp-dpsub",
411 .desc = "Xilinx DisplayPort Subsystem Driver",
416 static int zynqmp_dpsub_kms_init(struct zynqmp_dpsub *dpsub)
418 struct drm_encoder *encoder = &dpsub->drm->encoder;
419 struct drm_connector *connector;
422 /* Create the planes and the CRTC. */
423 ret = zynqmp_dpsub_create_planes(dpsub);
427 ret = zynqmp_dpsub_create_crtc(dpsub);
431 zynqmp_dpsub_map_crtc_to_plane(dpsub);
433 /* Create the encoder and attach the bridge. */
434 encoder->possible_crtcs |= drm_crtc_mask(&dpsub->drm->crtc);
435 drm_simple_encoder_init(&dpsub->drm->dev, encoder, DRM_MODE_ENCODER_NONE);
437 ret = drm_bridge_attach(encoder, dpsub->bridge, NULL,
438 DRM_BRIDGE_ATTACH_NO_CONNECTOR);
440 dev_err(dpsub->dev, "failed to attach bridge to encoder\n");
444 /* Create the connector for the chain of bridges. */
445 connector = drm_bridge_connector_init(&dpsub->drm->dev, encoder);
446 if (IS_ERR(connector)) {
447 dev_err(dpsub->dev, "failed to created connector\n");
448 ret = PTR_ERR(connector);
452 ret = drm_connector_attach_encoder(connector, encoder);
454 dev_err(dpsub->dev, "failed to attach connector to encoder\n");
461 drm_encoder_cleanup(encoder);
465 static void zynqmp_dpsub_drm_release(struct drm_device *drm, void *res)
467 struct zynqmp_dpsub_drm *dpdrm = res;
469 zynqmp_dpsub_release(dpdrm->dpsub);
472 int zynqmp_dpsub_drm_init(struct zynqmp_dpsub *dpsub)
474 struct zynqmp_dpsub_drm *dpdrm;
475 struct drm_device *drm;
479 * Allocate the drm_device and immediately add a cleanup action to
480 * release the zynqmp_dpsub instance. If any of those operations fail,
481 * dpsub->drm will remain NULL, which tells the caller that it must
484 dpdrm = devm_drm_dev_alloc(dpsub->dev, &zynqmp_dpsub_drm_driver,
485 struct zynqmp_dpsub_drm, dev);
487 return PTR_ERR(dpdrm);
489 dpdrm->dpsub = dpsub;
492 ret = drmm_add_action(drm, zynqmp_dpsub_drm_release, dpdrm);
498 /* Initialize mode config, vblank and the KMS poll helper. */
499 ret = drmm_mode_config_init(drm);
503 drm->mode_config.funcs = &zynqmp_dpsub_mode_config_funcs;
504 drm->mode_config.min_width = 0;
505 drm->mode_config.min_height = 0;
506 drm->mode_config.max_width = ZYNQMP_DISP_MAX_WIDTH;
507 drm->mode_config.max_height = ZYNQMP_DISP_MAX_HEIGHT;
509 ret = drm_vblank_init(drm, 1);
513 ret = zynqmp_dpsub_kms_init(dpsub);
517 drm_kms_helper_poll_init(drm);
519 /* Reset all components and register the DRM device. */
520 drm_mode_config_reset(drm);
522 ret = drm_dev_register(drm, 0);
526 /* Initialize fbdev generic emulation. */
527 drm_client_setup_with_fourcc(drm, DRM_FORMAT_RGB888);
532 drm_kms_helper_poll_fini(drm);
536 void zynqmp_dpsub_drm_cleanup(struct zynqmp_dpsub *dpsub)
538 struct drm_device *drm = &dpsub->drm->dev;
541 drm_atomic_helper_shutdown(drm);
542 drm_encoder_cleanup(&dpsub->drm->encoder);
543 drm_kms_helper_poll_fini(drm);