/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 */
#include <drm/drmP.h>
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "atom.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#ifdef CONFIG_DRM_AMDGPU_SI
#include "dce_v6_0.h"
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
#include "dce_v8_0.h"
#endif
#include "dce_v10_0.h"
#include "dce_v11_0.h"
#include "dce_virtual.h"

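/* Period of the software-emulated vblank, in nanoseconds (~16.67 ms, i.e. 60 Hz). */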
#define DCE_VIRTUAL_VBLANK_PERIOD 16666666


static void dce_virtual_set_display_funcs(struct amdgpu_device *adev);
static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev);
static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
                                              int index);
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
                                                        int crtc,
                                                        enum amdgpu_interrupt_state state);

/**
 * dce_virtual_vblank_wait - vblank wait asic callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * No-op for the virtual display; there is no hardware vblank to wait on.
 */
static void dce_virtual_vblank_wait(struct amdgpu_device *adev, int crtc)
{
        return;
}

static u32 dce_virtual_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
        return 0;
}

static void dce_virtual_page_flip(struct amdgpu_device *adev,
                              int crtc_id, u64 crtc_base, bool async)
{
        return;
}

static int dce_virtual_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
                                        u32 *vbl, u32 *position)
{
        *vbl = 0;
        *position = 0;

        return -EINVAL;
}

static bool dce_virtual_hpd_sense(struct amdgpu_device *adev,
                               enum amdgpu_hpd_id hpd)
{
        return true;
}

static void dce_virtual_hpd_set_polarity(struct amdgpu_device *adev,
                                      enum amdgpu_hpd_id hpd)
{
        return;
}

static u32 dce_virtual_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
        return 0;
}

/**
 * dce_virtual_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * No-op for the virtual display; there are no hardware watermarks or
 * line buffers to program.
 */
static void dce_virtual_bandwidth_update(struct amdgpu_device *adev)
{
        return;
}

static int dce_virtual_crtc_gamma_set(struct drm_crtc *crtc, u16 *red,
                                      u16 *green, u16 *blue, uint32_t size,
                                      struct drm_modeset_acquire_ctx *ctx)
{
        return 0;
}

static void dce_virtual_crtc_destroy(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        drm_crtc_cleanup(crtc);
        kfree(amdgpu_crtc);
}

static const struct drm_crtc_funcs dce_virtual_crtc_funcs = {
        .cursor_set2 = NULL,
        .cursor_move = NULL,
        .gamma_set = dce_virtual_crtc_gamma_set,
        .set_config = amdgpu_crtc_set_config,
        .destroy = dce_virtual_crtc_destroy,
        .page_flip_target = amdgpu_crtc_page_flip_target,
};

static void dce_virtual_crtc_dpms(struct drm_crtc *crtc, int mode)
{
        struct drm_device *dev = crtc->dev;
        struct amdgpu_device *adev = dev->dev_private;
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
        unsigned type;

        if (amdgpu_sriov_vf(adev))
                return;

        switch (mode) {
        case DRM_MODE_DPMS_ON:
                amdgpu_crtc->enabled = true;
                /* Make sure VBLANK interrupts are still enabled */
                type = amdgpu_crtc_idx_to_irq_type(adev, amdgpu_crtc->crtc_id);
                amdgpu_irq_update(adev, &adev->crtc_irq, type);
                drm_crtc_vblank_on(crtc);
                break;
        case DRM_MODE_DPMS_STANDBY:
        case DRM_MODE_DPMS_SUSPEND:
        case DRM_MODE_DPMS_OFF:
                drm_crtc_vblank_off(crtc);
                amdgpu_crtc->enabled = false;
                break;
        }
}


static void dce_virtual_crtc_prepare(struct drm_crtc *crtc)
{
        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void dce_virtual_crtc_commit(struct drm_crtc *crtc)
{
        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
}

static void dce_virtual_crtc_disable(struct drm_crtc *crtc)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        dce_virtual_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
        if (crtc->primary->fb) {
                int r;
                struct amdgpu_framebuffer *amdgpu_fb;
                struct amdgpu_bo *abo;

                amdgpu_fb = to_amdgpu_framebuffer(crtc->primary->fb);
                abo = gem_to_amdgpu_bo(amdgpu_fb->obj);
                r = amdgpu_bo_reserve(abo, true);
                if (unlikely(r))
                        DRM_ERROR("failed to reserve abo before unpin\n");
                else {
                        amdgpu_bo_unpin(abo);
                        amdgpu_bo_unreserve(abo);
                }
        }

        amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
        amdgpu_crtc->encoder = NULL;
        amdgpu_crtc->connector = NULL;
}

static int dce_virtual_crtc_mode_set(struct drm_crtc *crtc,
                                  struct drm_display_mode *mode,
                                  struct drm_display_mode *adjusted_mode,
                                  int x, int y, struct drm_framebuffer *old_fb)
{
        struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

        /* update the hw version for dpm */
        amdgpu_crtc->hw_mode = *adjusted_mode;

        return 0;
}

static bool dce_virtual_crtc_mode_fixup(struct drm_crtc *crtc,
                                     const struct drm_display_mode *mode,
                                     struct drm_display_mode *adjusted_mode)
{
        return true;
}


static int dce_virtual_crtc_set_base(struct drm_crtc *crtc, int x, int y,
                                  struct drm_framebuffer *old_fb)
{
        return 0;
}

static int dce_virtual_crtc_set_base_atomic(struct drm_crtc *crtc,
                                         struct drm_framebuffer *fb,
                                         int x, int y, enum mode_set_atomic state)
{
        return 0;
}

static const struct drm_crtc_helper_funcs dce_virtual_crtc_helper_funcs = {
        .dpms = dce_virtual_crtc_dpms,
        .mode_fixup = dce_virtual_crtc_mode_fixup,
        .mode_set = dce_virtual_crtc_mode_set,
        .mode_set_base = dce_virtual_crtc_set_base,
        .mode_set_base_atomic = dce_virtual_crtc_set_base_atomic,
        .prepare = dce_virtual_crtc_prepare,
        .commit = dce_virtual_crtc_commit,
        .disable = dce_virtual_crtc_disable,
};

static int dce_virtual_crtc_init(struct amdgpu_device *adev, int index)
{
        struct amdgpu_crtc *amdgpu_crtc;

        amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
                              (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
        if (amdgpu_crtc == NULL)
                return -ENOMEM;

        drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_virtual_crtc_funcs);

        drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
        amdgpu_crtc->crtc_id = index;
        adev->mode_info.crtcs[index] = amdgpu_crtc;

        amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
        amdgpu_crtc->encoder = NULL;
        amdgpu_crtc->connector = NULL;
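        /* the vblank emulation timer starts disabled; it is armed on demand
         * when the crtc vblank interrupt is enabled */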
        amdgpu_crtc->vsync_timer_enabled = AMDGPU_IRQ_STATE_DISABLE;
        drm_crtc_helper_add(&amdgpu_crtc->base, &dce_virtual_crtc_helper_funcs);

        return 0;
}

static int dce_virtual_early_init(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        dce_virtual_set_display_funcs(adev);
        dce_virtual_set_irq_funcs(adev);

        adev->mode_info.num_hpd = 1;
        adev->mode_info.num_dig = 1;
        return 0;
}

static struct drm_encoder *
dce_virtual_encoder(struct drm_connector *connector)
{
        int enc_id = connector->encoder_ids[0];
        struct drm_encoder *encoder;
        int i;

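        /* prefer an attached virtual encoder; fall back to the first attached encoder below */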
        for (i = 0; i < DRM_CONNECTOR_MAX_ENCODER; i++) {
                if (connector->encoder_ids[i] == 0)
                        break;

                encoder = drm_encoder_find(connector->dev, NULL, connector->encoder_ids[i]);
                if (!encoder)
                        continue;

                if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
                        return encoder;
        }

        /* pick the first one */
        if (enc_id)
                return drm_encoder_find(connector->dev, NULL, enc_id);
        return NULL;
}

static int dce_virtual_get_modes(struct drm_connector *connector)
{
        struct drm_device *dev = connector->dev;
        struct drm_display_mode *mode = NULL;
        unsigned i;
        static const struct mode_size {
                int w;
                int h;
        } common_modes[17] = {
                { 640,  480},
                { 720,  480},
                { 800,  600},
                { 848,  480},
                {1024,  768},
                {1152,  768},
                {1280,  720},
                {1280,  800},
                {1280,  854},
                {1280,  960},
                {1280, 1024},
                {1440,  900},
                {1400, 1050},
                {1680, 1050},
                {1600, 1200},
                {1920, 1080},
                {1920, 1200}
        };

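        /* no physical display or EDID behind the virtual connector, so
         * advertise a fixed list of common modes using 60 Hz CVT timings */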
        for (i = 0; i < 17; i++) {
                mode = drm_cvt_mode(dev, common_modes[i].w, common_modes[i].h, 60, false, false, false);
                drm_mode_probed_add(connector, mode);
        }

        return 0;
}

static int dce_virtual_mode_valid(struct drm_connector *connector,
                                  struct drm_display_mode *mode)
{
        return MODE_OK;
}

static int
dce_virtual_dpms(struct drm_connector *connector, int mode)
{
        return 0;
}

static int
dce_virtual_set_property(struct drm_connector *connector,
                         struct drm_property *property,
                         uint64_t val)
{
        return 0;
}

static void dce_virtual_destroy(struct drm_connector *connector)
{
        drm_connector_unregister(connector);
        drm_connector_cleanup(connector);
        kfree(connector);
}

static void dce_virtual_force(struct drm_connector *connector)
{
        return;
}

static const struct drm_connector_helper_funcs dce_virtual_connector_helper_funcs = {
        .get_modes = dce_virtual_get_modes,
        .mode_valid = dce_virtual_mode_valid,
        .best_encoder = dce_virtual_encoder,
};

static const struct drm_connector_funcs dce_virtual_connector_funcs = {
        .dpms = dce_virtual_dpms,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .set_property = dce_virtual_set_property,
        .destroy = dce_virtual_destroy,
        .force = dce_virtual_force,
};

static int dce_virtual_sw_init(void *handle)
{
        int r, i;
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 229, &adev->crtc_irq);
        if (r)
                return r;

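        /* no hardware vblank counter is available for the virtual display */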
        adev->ddev->max_vblank_count = 0;

        adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;

        adev->ddev->mode_config.max_width = 16384;
        adev->ddev->mode_config.max_height = 16384;

        adev->ddev->mode_config.preferred_depth = 24;
        adev->ddev->mode_config.prefer_shadow = 1;

        adev->ddev->mode_config.fb_base = adev->mc.aper_base;

        r = amdgpu_modeset_create_props(adev);
        if (r)
                return r;

        adev->ddev->mode_config.max_width = 16384;
        adev->ddev->mode_config.max_height = 16384;

        /* allocate crtcs, encoders, connectors */
        for (i = 0; i < adev->mode_info.num_crtc; i++) {
                r = dce_virtual_crtc_init(adev, i);
                if (r)
                        return r;
                r = dce_virtual_connector_encoder_init(adev, i);
                if (r)
                        return r;
        }

        drm_kms_helper_poll_init(adev->ddev);

        adev->mode_info.mode_config_initialized = true;
        return 0;
}

static int dce_virtual_sw_fini(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

        kfree(adev->mode_info.bios_hardcoded_edid);

        drm_kms_helper_poll_fini(adev->ddev);

        drm_mode_config_cleanup(adev->ddev);
        /* clear the crtcs pointers so the dce irq finish routine doesn't access freed data */
        memset(adev->mode_info.crtcs, 0, sizeof(adev->mode_info.crtcs[0]) * AMDGPU_MAX_CRTCS);
        adev->mode_info.mode_config_initialized = false;
        return 0;
}

static int dce_virtual_hw_init(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;

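        /* shut off the real display hardware (DCE), if the ASIC has any,
         * since the virtual display is driving the outputs instead */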
        switch (adev->asic_type) {
#ifdef CONFIG_DRM_AMDGPU_SI
        case CHIP_TAHITI:
        case CHIP_PITCAIRN:
        case CHIP_VERDE:
        case CHIP_OLAND:
                dce_v6_0_disable_dce(adev);
                break;
#endif
#ifdef CONFIG_DRM_AMDGPU_CIK
        case CHIP_BONAIRE:
        case CHIP_HAWAII:
        case CHIP_KAVERI:
        case CHIP_KABINI:
        case CHIP_MULLINS:
                dce_v8_0_disable_dce(adev);
                break;
#endif
        case CHIP_FIJI:
        case CHIP_TONGA:
                dce_v10_0_disable_dce(adev);
                break;
        case CHIP_CARRIZO:
        case CHIP_STONEY:
        case CHIP_POLARIS11:
        case CHIP_POLARIS10:
                dce_v11_0_disable_dce(adev);
                break;
        case CHIP_TOPAZ:
#ifdef CONFIG_DRM_AMDGPU_SI
        case CHIP_HAINAN:
#endif
                /* no DCE */
                break;
        case CHIP_VEGA10:
                break;
        default:
                DRM_ERROR("Virtual display unsupported ASIC type: 0x%X\n", adev->asic_type);
        }
        return 0;
}

static int dce_virtual_hw_fini(void *handle)
{
        struct amdgpu_device *adev = (struct amdgpu_device *)handle;
        int i = 0;

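        /* disable the emulated vblank interrupts (and their timers) on all crtcs */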
        for (i = 0; i < adev->mode_info.num_crtc; i++)
                if (adev->mode_info.crtcs[i])
                        dce_virtual_set_crtc_vblank_interrupt_state(adev, i, AMDGPU_IRQ_STATE_DISABLE);

        return 0;
}

static int dce_virtual_suspend(void *handle)
{
        return dce_virtual_hw_fini(handle);
}

static int dce_virtual_resume(void *handle)
{
        return dce_virtual_hw_init(handle);
}

static bool dce_virtual_is_idle(void *handle)
{
        return true;
}

static int dce_virtual_wait_for_idle(void *handle)
{
        return 0;
}

static int dce_virtual_soft_reset(void *handle)
{
        return 0;
}

static int dce_virtual_set_clockgating_state(void *handle,
                                          enum amd_clockgating_state state)
{
        return 0;
}

static int dce_virtual_set_powergating_state(void *handle,
                                          enum amd_powergating_state state)
{
        return 0;
}

static const struct amd_ip_funcs dce_virtual_ip_funcs = {
        .name = "dce_virtual",
        .early_init = dce_virtual_early_init,
        .late_init = NULL,
        .sw_init = dce_virtual_sw_init,
        .sw_fini = dce_virtual_sw_fini,
        .hw_init = dce_virtual_hw_init,
        .hw_fini = dce_virtual_hw_fini,
        .suspend = dce_virtual_suspend,
        .resume = dce_virtual_resume,
        .is_idle = dce_virtual_is_idle,
        .wait_for_idle = dce_virtual_wait_for_idle,
        .soft_reset = dce_virtual_soft_reset,
        .set_clockgating_state = dce_virtual_set_clockgating_state,
        .set_powergating_state = dce_virtual_set_powergating_state,
};

/* these are handled by the primary encoders */
static void dce_virtual_encoder_prepare(struct drm_encoder *encoder)
{
        return;
}

static void dce_virtual_encoder_commit(struct drm_encoder *encoder)
{
        return;
}

static void
dce_virtual_encoder_mode_set(struct drm_encoder *encoder,
                             struct drm_display_mode *mode,
                             struct drm_display_mode *adjusted_mode)
{
        return;
}

static void dce_virtual_encoder_disable(struct drm_encoder *encoder)
{
        return;
}

static void
dce_virtual_encoder_dpms(struct drm_encoder *encoder, int mode)
{
        return;
}

static bool dce_virtual_encoder_mode_fixup(struct drm_encoder *encoder,
                                    const struct drm_display_mode *mode,
                                    struct drm_display_mode *adjusted_mode)
{
        return true;
}

static const struct drm_encoder_helper_funcs dce_virtual_encoder_helper_funcs = {
        .dpms = dce_virtual_encoder_dpms,
        .mode_fixup = dce_virtual_encoder_mode_fixup,
        .prepare = dce_virtual_encoder_prepare,
        .mode_set = dce_virtual_encoder_mode_set,
        .commit = dce_virtual_encoder_commit,
        .disable = dce_virtual_encoder_disable,
};

static void dce_virtual_encoder_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}

static const struct drm_encoder_funcs dce_virtual_encoder_funcs = {
        .destroy = dce_virtual_encoder_destroy,
};

static int dce_virtual_connector_encoder_init(struct amdgpu_device *adev,
                                              int index)
{
        struct drm_encoder *encoder;
        struct drm_connector *connector;

        /* add a new encoder */
        encoder = kzalloc(sizeof(struct drm_encoder), GFP_KERNEL);
        if (!encoder)
                return -ENOMEM;
        encoder->possible_crtcs = 1 << index;
        drm_encoder_init(adev->ddev, encoder, &dce_virtual_encoder_funcs,
                         DRM_MODE_ENCODER_VIRTUAL, NULL);
        drm_encoder_helper_add(encoder, &dce_virtual_encoder_helper_funcs);

        connector = kzalloc(sizeof(struct drm_connector), GFP_KERNEL);
        if (!connector) {
                kfree(encoder);
                return -ENOMEM;
        }

        /* add a new connector */
        drm_connector_init(adev->ddev, connector, &dce_virtual_connector_funcs,
                           DRM_MODE_CONNECTOR_VIRTUAL);
        drm_connector_helper_add(connector, &dce_virtual_connector_helper_funcs);
        connector->display_info.subpixel_order = SubPixelHorizontalRGB;
        connector->interlace_allowed = false;
        connector->doublescan_allowed = false;
        drm_connector_register(connector);

        /* link them */
        drm_mode_connector_attach_encoder(connector, encoder);

        return 0;
}

static const struct amdgpu_display_funcs dce_virtual_display_funcs = {
        .bandwidth_update = &dce_virtual_bandwidth_update,
        .vblank_get_counter = &dce_virtual_vblank_get_counter,
        .vblank_wait = &dce_virtual_vblank_wait,
        .backlight_set_level = NULL,
        .backlight_get_level = NULL,
        .hpd_sense = &dce_virtual_hpd_sense,
        .hpd_set_polarity = &dce_virtual_hpd_set_polarity,
        .hpd_get_gpio_reg = &dce_virtual_hpd_get_gpio_reg,
        .page_flip = &dce_virtual_page_flip,
        .page_flip_get_scanoutpos = &dce_virtual_crtc_get_scanoutpos,
        .add_encoder = NULL,
        .add_connector = NULL,
};

static void dce_virtual_set_display_funcs(struct amdgpu_device *adev)
{
        if (adev->mode_info.funcs == NULL)
                adev->mode_info.funcs = &dce_virtual_display_funcs;
}

static int dce_virtual_pageflip(struct amdgpu_device *adev,
                                unsigned crtc_id)
{
        unsigned long flags;
        struct amdgpu_crtc *amdgpu_crtc;
        struct amdgpu_flip_work *works;

        amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

        if (crtc_id >= adev->mode_info.num_crtc) {
                DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
                return -EINVAL;
        }

        /* IRQ could occur when in initial stage */
        if (amdgpu_crtc == NULL)
                return 0;

        spin_lock_irqsave(&adev->ddev->event_lock, flags);
        works = amdgpu_crtc->pflip_works;
        if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
                DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
                        "AMDGPU_FLIP_SUBMITTED(%d)\n",
                        amdgpu_crtc->pflip_status,
                        AMDGPU_FLIP_SUBMITTED);
                spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
                return 0;
        }

        /* page flip completed. clean up */
        amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
        amdgpu_crtc->pflip_works = NULL;

        /* wake up userspace */
        if (works->event)
                drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);

        spin_unlock_irqrestore(&adev->ddev->event_lock, flags);

        drm_crtc_vblank_put(&amdgpu_crtc->base);
        schedule_work(&works->unpin_work);

        return 0;
}

static enum hrtimer_restart dce_virtual_vblank_timer_handle(struct hrtimer *vblank_timer)
{
        struct amdgpu_crtc *amdgpu_crtc = container_of(vblank_timer,
                                                       struct amdgpu_crtc, vblank_timer);
        struct drm_device *ddev = amdgpu_crtc->base.dev;
        struct amdgpu_device *adev = ddev->dev_private;

        drm_handle_vblank(ddev, amdgpu_crtc->crtc_id);
        dce_virtual_pageflip(adev, amdgpu_crtc->crtc_id);
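        /* re-arm the timer so vblanks keep firing every DCE_VIRTUAL_VBLANK_PERIOD */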
        hrtimer_start(vblank_timer, DCE_VIRTUAL_VBLANK_PERIOD,
                      HRTIMER_MODE_REL);

        return HRTIMER_NORESTART;
}

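/*
 * There is no real vblank interrupt for the virtual display: enabling the
 * "interrupt" arms a per-crtc hrtimer that fires once per vblank period,
 * and disabling it cancels that timer.
 */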
static void dce_virtual_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
                                                        int crtc,
                                                        enum amdgpu_interrupt_state state)
{
        if (crtc >= adev->mode_info.num_crtc || !adev->mode_info.crtcs[crtc]) {
                DRM_DEBUG("invalid crtc %d\n", crtc);
                return;
        }

        if (state && !adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
                DRM_DEBUG("Enable software vsync timer\n");
                hrtimer_init(&adev->mode_info.crtcs[crtc]->vblank_timer,
                             CLOCK_MONOTONIC, HRTIMER_MODE_REL);
                hrtimer_set_expires(&adev->mode_info.crtcs[crtc]->vblank_timer,
                                    DCE_VIRTUAL_VBLANK_PERIOD);
                adev->mode_info.crtcs[crtc]->vblank_timer.function =
                        dce_virtual_vblank_timer_handle;
                hrtimer_start(&adev->mode_info.crtcs[crtc]->vblank_timer,
                              DCE_VIRTUAL_VBLANK_PERIOD, HRTIMER_MODE_REL);
        } else if (!state && adev->mode_info.crtcs[crtc]->vsync_timer_enabled) {
                DRM_DEBUG("Disable software vsync timer\n");
                hrtimer_cancel(&adev->mode_info.crtcs[crtc]->vblank_timer);
        }

        adev->mode_info.crtcs[crtc]->vsync_timer_enabled = state;
        DRM_DEBUG("[FM]set crtc %d vblank interrupt state %d\n", crtc, state);
}


static int dce_virtual_set_crtc_irq_state(struct amdgpu_device *adev,
                                          struct amdgpu_irq_src *source,
                                          unsigned type,
                                          enum amdgpu_interrupt_state state)
{
        if (type > AMDGPU_CRTC_IRQ_VBLANK6)
                return -EINVAL;

        dce_virtual_set_crtc_vblank_interrupt_state(adev, type, state);

        return 0;
}

static const struct amdgpu_irq_src_funcs dce_virtual_crtc_irq_funcs = {
        .set = dce_virtual_set_crtc_irq_state,
        .process = NULL,
};

static void dce_virtual_set_irq_funcs(struct amdgpu_device *adev)
{
        adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VBLANK6 + 1;
        adev->crtc_irq.funcs = &dce_virtual_crtc_irq_funcs;
}

const struct amdgpu_ip_block_version dce_virtual_ip_block =
{
        .type = AMD_IP_BLOCK_TYPE_DCE,
        .major = 1,
        .minor = 0,
        .rev = 0,
        .funcs = &dce_virtual_ip_funcs,
};