2 * Copyright 2016 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
26 #include "dce_mem_input.h"
27 #include "reg_helper.h"
28 #include "basics/conversion.h"
36 #define FN(reg_name, field_name) \
37 dce_mi->shifts->field_name, dce_mi->masks->field_name
41 unsigned int page_width;
42 unsigned int page_height;
43 unsigned char min_pte_before_flip_horiz_scan;
44 unsigned char min_pte_before_flip_vert_scan;
45 unsigned char pte_req_per_chunk;
46 unsigned char param_6;
47 unsigned char param_7;
48 unsigned char param_8;
51 enum mi_bits_per_pixel {
59 enum mi_tiling_format {
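/* Indexed by [tiling][bpp bucket]; each row lists the struct pte_setting fields in declaration order. */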
66 static const struct pte_setting pte_settings[mi_tiling_count][mi_bpp_count] = {
67 [mi_tiling_linear] = {
68 { 8, 4096, 1, 8, 0, 1, 0, 0, 0},
69 { 16, 2048, 1, 8, 0, 1, 0, 0, 0},
70 { 32, 1024, 1, 8, 0, 1, 0, 0, 0},
71 { 64, 512, 1, 8, 0, 1, 0, 0, 0}, /* new for 64bpp from HW */
74 { 8, 512, 8, 1, 0, 1, 0, 0, 0}, /* 0 for invalid */
75 { 16, 256, 8, 2, 0, 1, 0, 0, 0},
76 { 32, 128, 8, 4, 0, 1, 0, 0, 0},
77 { 64, 64, 8, 4, 0, 1, 0, 0, 0}, /* fake */
80 { 8, 64, 64, 8, 8, 1, 4, 0, 0},
81 { 16, 64, 32, 8, 16, 1, 8, 0, 0},
82 { 32, 32, 32, 16, 16, 1, 8, 0, 0},
83 { 64, 8, 32, 16, 16, 1, 8, 0, 0}, /* fake */
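/* Map a surface pixel format to the coarse bpp bucket used to index pte_settings. */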
87 static enum mi_bits_per_pixel get_mi_bpp(
88 enum surface_pixel_format format)
90 if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616)
92 else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB8888)
94 else if (format >= SURFACE_PIXEL_FORMAT_GRPH_ARGB1555)
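/* Collapse the gfx8 array mode into linear / 1D tiled / 2D tiled for the PTE table lookup. */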
100 static enum mi_tiling_format get_mi_tiling(
101 struct dc_tiling_info *tiling_info)
103 switch (tiling_info->gfx8.array_mode) {
104 case DC_ARRAY_1D_TILED_THIN1:
105 case DC_ARRAY_1D_TILED_THICK:
106 case DC_ARRAY_PRT_TILED_THIN1:
108 case DC_ARRAY_2D_TILED_THIN1:
109 case DC_ARRAY_2D_TILED_THICK:
110 case DC_ARRAY_2D_TILED_X_THICK:
111 case DC_ARRAY_PRT_2D_TILED_THIN1:
112 case DC_ARRAY_PRT_2D_TILED_THICK:
114 case DC_ARRAY_LINEAR_GENERAL:
115 case DC_ARRAY_LINEAR_ALLIGNED:
116 return mi_tiling_linear;
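/* Surfaces rotated by 90 or 270 degrees are scanned vertically. */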
122 static bool is_vert_scan(enum dc_rotation_angle rotation)
125 case ROTATION_ANGLE_90:
126 case ROTATION_ANGLE_270:
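/*
 * Program the DVMM PTE request parameters (page width/height, minimum PTEs
 * before flip, PTE requests per chunk) for the surface format, tiling and
 * scan direction.
 */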
133 static void dce_mi_program_pte_vm(
134 struct mem_input *mi,
135 enum surface_pixel_format format,
136 struct dc_tiling_info *tiling_info,
137 enum dc_rotation_angle rotation)
139 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
140 enum mi_bits_per_pixel mi_bpp = get_mi_bpp(format);
141 enum mi_tiling_format mi_tiling = get_mi_tiling(tiling_info);
142 const struct pte_setting *pte = &pte_settings[mi_tiling][mi_bpp];
144 unsigned int page_width = log_2(pte->page_width);
145 unsigned int page_height = log_2(pte->page_height);
146 unsigned int min_pte_before_flip = is_vert_scan(rotation) ?
147 pte->min_pte_before_flip_vert_scan :
148 pte->min_pte_before_flip_horiz_scan;
150 REG_UPDATE(GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT,
151 GRPH_PIPE_OUTSTANDING_REQUEST_LIMIT, 0x7f);
153 REG_UPDATE_3(DVMM_PTE_CONTROL,
154 DVMM_PAGE_WIDTH, page_width,
155 DVMM_PAGE_HEIGHT, page_height,
156 DVMM_MIN_PTE_BEFORE_FLIP, min_pte_before_flip);
158 REG_UPDATE_2(DVMM_PTE_ARB_CONTROL,
159 DVMM_PTE_REQ_PER_CHUNK, pte->pte_req_per_chunk,
160 DVMM_MAX_PTE_REQ_OUTSTANDING, 0x7f);
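/* Select watermark set 'wm_select' and program its urgency low/high watermarks. */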
163 static void program_urgency_watermark(
164 struct dce_mem_input *dce_mi,
166 uint32_t urgency_low_wm,
167 uint32_t urgency_high_wm)
169 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
170 URGENCY_WATERMARK_MASK, wm_select);
172 REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
173 URGENCY_LOW_WATERMARK, urgency_low_wm,
174 URGENCY_HIGH_WATERMARK, urgency_high_wm);
177 #if defined(CONFIG_DRM_AMD_DC_SI)
178 static void dce60_program_urgency_watermark(
179 struct dce_mem_input *dce_mi,
181 uint32_t urgency_low_wm,
182 uint32_t urgency_high_wm)
184 REG_UPDATE(DPG_PIPE_ARBITRATION_CONTROL3,
185 URGENCY_WATERMARK_MASK, wm_select);
187 REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
188 URGENCY_LOW_WATERMARK, urgency_low_wm,
189 URGENCY_HIGH_WATERMARK, urgency_high_wm);
193 static void dce120_program_urgency_watermark(
194 struct dce_mem_input *dce_mi,
196 uint32_t urgency_low_wm,
197 uint32_t urgency_high_wm)
199 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
200 URGENCY_WATERMARK_MASK, wm_select);
202 REG_SET_2(DPG_PIPE_URGENCY_CONTROL, 0,
203 URGENCY_LOW_WATERMARK, urgency_low_wm,
204 URGENCY_HIGH_WATERMARK, urgency_high_wm);
206 REG_SET_2(DPG_PIPE_URGENT_LEVEL_CONTROL, 0,
207 URGENT_LEVEL_LOW_WATERMARK, urgency_low_wm,
208 URGENT_LEVEL_HIGH_WATERMARK, urgency_high_wm);
212 #if defined(CONFIG_DRM_AMD_DC_SI)
213 static void dce60_program_nbp_watermark(
214 struct dce_mem_input *dce_mi,
218 REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
219 NB_PSTATE_CHANGE_WATERMARK_MASK, wm_select);
221 REG_UPDATE_3(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
222 NB_PSTATE_CHANGE_ENABLE, 1,
223 NB_PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
224 NB_PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);
226 REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
227 NB_PSTATE_CHANGE_WATERMARK, nbp_wm);
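/*
 * Program the north-bridge p-state change watermark for the selected set,
 * using whichever of DPG_PIPE_NB_PSTATE_CHANGE_CONTROL or
 * DPG_PIPE_LOW_POWER_CONTROL the ASIC exposes.
 */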
231 static void program_nbp_watermark(
232 struct dce_mem_input *dce_mi,
236 if (REG(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL)) {
237 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
238 NB_PSTATE_CHANGE_WATERMARK_MASK, wm_select);
240 REG_UPDATE_3(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
241 NB_PSTATE_CHANGE_ENABLE, 1,
242 NB_PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
243 NB_PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);
245 REG_UPDATE(DPG_PIPE_NB_PSTATE_CHANGE_CONTROL,
246 NB_PSTATE_CHANGE_WATERMARK, nbp_wm);
249 if (REG(DPG_PIPE_LOW_POWER_CONTROL)) {
250 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
251 PSTATE_CHANGE_WATERMARK_MASK, wm_select);
253 REG_UPDATE_3(DPG_PIPE_LOW_POWER_CONTROL,
254 PSTATE_CHANGE_ENABLE, 1,
255 PSTATE_CHANGE_URGENT_DURING_REQUEST, 1,
256 PSTATE_CHANGE_NOT_SELF_REFRESH_DURING_REQUEST, 1);
258 REG_UPDATE(DPG_PIPE_LOW_POWER_CONTROL,
259 PSTATE_CHANGE_WATERMARK, nbp_wm);
263 #if defined(CONFIG_DRM_AMD_DC_SI)
264 static void dce60_program_stutter_watermark(
265 struct dce_mem_input *dce_mi,
267 uint32_t stutter_mark)
269 REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
270 STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);
272 REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
273 STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
277 static void dce120_program_stutter_watermark(
278 struct dce_mem_input *dce_mi,
280 uint32_t stutter_mark,
281 uint32_t stutter_entry)
283 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
284 STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);
286 if (REG(DPG_PIPE_STUTTER_CONTROL2))
287 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL2,
288 STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
289 STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
291 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
292 STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark,
293 STUTTER_ENTER_SELF_REFRESH_WATERMARK, stutter_entry);
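/*
 * Program the stutter (self-refresh exit) watermark for the selected set;
 * DPG_PIPE_STUTTER_CONTROL2 is used when present.
 */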
296 static void program_stutter_watermark(
297 struct dce_mem_input *dce_mi,
299 uint32_t stutter_mark)
301 REG_UPDATE(DPG_WATERMARK_MASK_CONTROL,
302 STUTTER_EXIT_SELF_REFRESH_WATERMARK_MASK, wm_select);
304 if (REG(DPG_PIPE_STUTTER_CONTROL2))
305 REG_UPDATE(DPG_PIPE_STUTTER_CONTROL2,
306 STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
308 REG_UPDATE(DPG_PIPE_STUTTER_CONTROL,
309 STUTTER_EXIT_SELF_REFRESH_WATERMARK, stutter_mark);
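/* Default variant: only watermark sets A (wm_select 2) and D (wm_select 1) are programmed. */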
312 static void dce_mi_program_display_marks(
313 struct mem_input *mi,
314 struct dce_watermarks nbp,
315 struct dce_watermarks stutter_exit,
316 struct dce_watermarks stutter_enter,
317 struct dce_watermarks urgent,
318 uint32_t total_dest_line_time_ns)
320 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
321 uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
323 program_urgency_watermark(dce_mi, 2, /* set a */
324 urgent.a_mark, total_dest_line_time_ns);
325 program_urgency_watermark(dce_mi, 1, /* set d */
326 urgent.d_mark, total_dest_line_time_ns);
328 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
329 STUTTER_ENABLE, stutter_en,
330 STUTTER_IGNORE_FBC, 1);
331 program_nbp_watermark(dce_mi, 2, nbp.a_mark); /* set a */
332 program_nbp_watermark(dce_mi, 1, nbp.d_mark); /* set d */
334 program_stutter_watermark(dce_mi, 2, stutter_exit.a_mark); /* set a */
335 program_stutter_watermark(dce_mi, 1, stutter_exit.d_mark); /* set d */
338 #if defined(CONFIG_DRM_AMD_DC_SI)
339 static void dce60_mi_program_display_marks(
340 struct mem_input *mi,
341 struct dce_watermarks nbp,
342 struct dce_watermarks stutter_exit,
343 struct dce_watermarks stutter_enter,
344 struct dce_watermarks urgent,
345 uint32_t total_dest_line_time_ns)
347 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
348 uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
350 dce60_program_urgency_watermark(dce_mi, 2, /* set a */
351 urgent.a_mark, total_dest_line_time_ns);
352 dce60_program_urgency_watermark(dce_mi, 1, /* set d */
353 urgent.d_mark, total_dest_line_time_ns);
355 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
356 STUTTER_ENABLE, stutter_en,
357 STUTTER_IGNORE_FBC, 1);
358 dce60_program_nbp_watermark(dce_mi, 2, nbp.a_mark); /* set a */
359 dce60_program_nbp_watermark(dce_mi, 1, nbp.d_mark); /* set d */
361 dce60_program_stutter_watermark(dce_mi, 2, stutter_exit.a_mark); /* set a */
362 dce60_program_stutter_watermark(dce_mi, 1, stutter_exit.d_mark); /* set d */
366 static void dce112_mi_program_display_marks(struct mem_input *mi,
367 struct dce_watermarks nbp,
368 struct dce_watermarks stutter_exit,
369 struct dce_watermarks stutter_entry,
370 struct dce_watermarks urgent,
371 uint32_t total_dest_line_time_ns)
373 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
374 uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
376 program_urgency_watermark(dce_mi, 0, /* set a */
377 urgent.a_mark, total_dest_line_time_ns);
378 program_urgency_watermark(dce_mi, 1, /* set b */
379 urgent.b_mark, total_dest_line_time_ns);
380 program_urgency_watermark(dce_mi, 2, /* set c */
381 urgent.c_mark, total_dest_line_time_ns);
382 program_urgency_watermark(dce_mi, 3, /* set d */
383 urgent.d_mark, total_dest_line_time_ns);
385 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
386 STUTTER_ENABLE, stutter_en,
387 STUTTER_IGNORE_FBC, 1);
388 program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
389 program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
390 program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
391 program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */
393 program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark); /* set a */
394 program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark); /* set b */
395 program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark); /* set c */
396 program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark); /* set d */
399 static void dce120_mi_program_display_marks(struct mem_input *mi,
400 struct dce_watermarks nbp,
401 struct dce_watermarks stutter_exit,
402 struct dce_watermarks stutter_entry,
403 struct dce_watermarks urgent,
404 uint32_t total_dest_line_time_ns)
406 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
407 uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
409 dce120_program_urgency_watermark(dce_mi, 0, /* set a */
410 urgent.a_mark, total_dest_line_time_ns);
411 dce120_program_urgency_watermark(dce_mi, 1, /* set b */
412 urgent.b_mark, total_dest_line_time_ns);
413 dce120_program_urgency_watermark(dce_mi, 2, /* set c */
414 urgent.c_mark, total_dest_line_time_ns);
415 dce120_program_urgency_watermark(dce_mi, 3, /* set d */
416 urgent.d_mark, total_dest_line_time_ns);
418 REG_UPDATE_2(DPG_PIPE_STUTTER_CONTROL,
419 STUTTER_ENABLE, stutter_en,
420 STUTTER_IGNORE_FBC, 1);
421 program_nbp_watermark(dce_mi, 0, nbp.a_mark); /* set a */
422 program_nbp_watermark(dce_mi, 1, nbp.b_mark); /* set b */
423 program_nbp_watermark(dce_mi, 2, nbp.c_mark); /* set c */
424 program_nbp_watermark(dce_mi, 3, nbp.d_mark); /* set d */
426 dce120_program_stutter_watermark(dce_mi, 0, stutter_exit.a_mark, stutter_entry.a_mark); /* set a */
427 dce120_program_stutter_watermark(dce_mi, 1, stutter_exit.b_mark, stutter_entry.b_mark); /* set b */
428 dce120_program_stutter_watermark(dce_mi, 2, stutter_exit.c_mark, stutter_entry.c_mark); /* set c */
429 dce120_program_stutter_watermark(dce_mi, 3, stutter_exit.d_mark, stutter_entry.d_mark); /* set d */
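/*
 * Program GRPH_CONTROL tiling fields; the applicable register layout
 * (GFX9, GFX8 or GFX6) is detected from the field masks below.
 */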
432 static void program_tiling(
433 struct dce_mem_input *dce_mi, const struct dc_tiling_info *info)
435 if (dce_mi->masks->GRPH_SW_MODE) { /* GFX9 */
436 REG_UPDATE_6(GRPH_CONTROL,
437 GRPH_SW_MODE, info->gfx9.swizzle,
438 GRPH_NUM_BANKS, log_2(info->gfx9.num_banks),
439 GRPH_NUM_SHADER_ENGINES, log_2(info->gfx9.num_shader_engines),
440 GRPH_NUM_PIPES, log_2(info->gfx9.num_pipes),
441 GRPH_COLOR_EXPANSION_MODE, 1,
442 GRPH_SE_ENABLE, info->gfx9.shaderEnable);
443 /* TODO: DCP0_GRPH_CONTROL__GRPH_SE_ENABLE where to get info
449 if (dce_mi->masks->GRPH_MICRO_TILE_MODE) { /* GFX8 */
450 REG_UPDATE_9(GRPH_CONTROL,
451 GRPH_NUM_BANKS, info->gfx8.num_banks,
452 GRPH_BANK_WIDTH, info->gfx8.bank_width,
453 GRPH_BANK_HEIGHT, info->gfx8.bank_height,
454 GRPH_MACRO_TILE_ASPECT, info->gfx8.tile_aspect,
455 GRPH_TILE_SPLIT, info->gfx8.tile_split,
456 GRPH_MICRO_TILE_MODE, info->gfx8.tile_mode,
457 GRPH_PIPE_CONFIG, info->gfx8.pipe_config,
458 GRPH_ARRAY_MODE, info->gfx8.array_mode,
459 GRPH_COLOR_EXPANSION_MODE, 1);
460 /* 01 - DCP_GRPH_COLOR_EXPANSION_MODE_ZEXP: zero expansion for YCbCr */
466 if (dce_mi->masks->GRPH_ARRAY_MODE) { /* GFX6 but reuses gfx8 struct */
467 REG_UPDATE_8(GRPH_CONTROL,
468 GRPH_NUM_BANKS, info->gfx8.num_banks,
469 GRPH_BANK_WIDTH, info->gfx8.bank_width,
470 GRPH_BANK_HEIGHT, info->gfx8.bank_height,
471 GRPH_MACRO_TILE_ASPECT, info->gfx8.tile_aspect,
472 GRPH_TILE_SPLIT, info->gfx8.tile_split,
473 /* DCE6 has no GRPH_MICRO_TILE_MODE mask */
474 GRPH_PIPE_CONFIG, info->gfx8.pipe_config,
475 GRPH_ARRAY_MODE, info->gfx8.array_mode,
476 GRPH_COLOR_EXPANSION_MODE, 1);
477 /* 01 - DCP_GRPH_COLOR_EXPANSION_MODE_ZEXP: zero expansion for YCbCr */
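/*
 * Program surface start/end, pitch and HW rotation; width and height are
 * swapped for 90/270 degree rotation.
 */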
484 static void program_size_and_rotation(
485 struct dce_mem_input *dce_mi,
486 enum dc_rotation_angle rotation,
487 const struct plane_size *plane_size)
489 const struct rect *in_rect = &plane_size->surface_size;
490 struct rect hw_rect = plane_size->surface_size;
491 const uint32_t rotation_angles[ROTATION_ANGLE_COUNT] = {
492 [ROTATION_ANGLE_0] = 0,
493 [ROTATION_ANGLE_90] = 1,
494 [ROTATION_ANGLE_180] = 2,
495 [ROTATION_ANGLE_270] = 3,
498 if (rotation == ROTATION_ANGLE_90 || rotation == ROTATION_ANGLE_270) {
499 hw_rect.x = in_rect->y;
500 hw_rect.y = in_rect->x;
502 hw_rect.height = in_rect->width;
503 hw_rect.width = in_rect->height;
506 REG_SET(GRPH_X_START, 0,
507 GRPH_X_START, hw_rect.x);
509 REG_SET(GRPH_Y_START, 0,
510 GRPH_Y_START, hw_rect.y);
512 REG_SET(GRPH_X_END, 0,
513 GRPH_X_END, hw_rect.width);
515 REG_SET(GRPH_Y_END, 0,
516 GRPH_Y_END, hw_rect.height);
518 REG_SET(GRPH_PITCH, 0,
519 GRPH_PITCH, plane_size->surface_pitch);
521 REG_SET(HW_ROTATION, 0,
522 GRPH_ROTATION_ANGLE, rotation_angles[rotation]);
525 #if defined(CONFIG_DRM_AMD_DC_SI)
526 static void dce60_program_size(
527 struct dce_mem_input *dce_mi,
528 enum dc_rotation_angle rotation, /* not used in DCE6 */
529 const struct plane_size *plane_size)
531 struct rect hw_rect = plane_size->surface_size;
532 /* DCE6 has no HW rotation, skip rotation_angles declaration */
534 /* DCE6 has no HW rotation, skip ROTATION_ANGLE_* processing */
536 REG_SET(GRPH_X_START, 0,
537 GRPH_X_START, hw_rect.x);
539 REG_SET(GRPH_Y_START, 0,
540 GRPH_Y_START, hw_rect.y);
542 REG_SET(GRPH_X_END, 0,
543 GRPH_X_END, hw_rect.width);
545 REG_SET(GRPH_Y_END, 0,
546 GRPH_Y_END, hw_rect.height);
548 REG_SET(GRPH_PITCH, 0,
549 GRPH_PITCH, plane_size->surface_pitch);
551 /* DCE6 has no HW_ROTATION register, skip setting rotation_angles */
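/*
 * Program GRPH depth/format, the red/blue crossbar swap for ABGR orderings,
 * and the sign/float prescale controls.
 */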
555 static void program_grph_pixel_format(
556 struct dce_mem_input *dce_mi,
557 enum surface_pixel_format format)
559 uint32_t red_xbar = 0, blue_xbar = 0; /* no swap */
560 uint32_t grph_depth = 0, grph_format = 0;
561 uint32_t sign = 0, floating = 0;
563 if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888 ||
564 /* TODO: doesn't look like we handle BGRA here,
565 * should probably swap endian */
566 format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010 ||
567 format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS ||
568 format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616 ||
569 format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
575 REG_SET_2(GRPH_SWAP_CNTL, 0,
576 GRPH_RED_CROSSBAR, red_xbar,
577 GRPH_BLUE_CROSSBAR, blue_xbar);
580 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
584 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
588 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
592 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
593 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
597 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
598 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
599 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
603 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
607 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F: /* shouldn't this get float too? */
608 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
609 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616:
614 DC_ERR("unsupported grph pixel format");
618 REG_UPDATE_2(GRPH_CONTROL,
619 GRPH_DEPTH, grph_depth,
620 GRPH_FORMAT, grph_format);
622 REG_UPDATE_4(PRESCALE_GRPH_CONTROL,
623 GRPH_PRESCALE_SELECT, floating,
624 GRPH_PRESCALE_R_SIGN, sign,
625 GRPH_PRESCALE_G_SIGN, sign,
626 GRPH_PRESCALE_B_SIGN, sign);
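/* Reset GRPH_CONTROL to linear (DC_SW_LINEAR) for whichever GFX generation is present. */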
629 static void dce_mi_clear_tiling(
630 struct mem_input *mi)
632 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
634 if (dce_mi->masks->GRPH_SW_MODE) { /* GFX9 */
635 REG_UPDATE(GRPH_CONTROL,
636 GRPH_SW_MODE, DC_SW_LINEAR);
639 if (dce_mi->masks->GRPH_MICRO_TILE_MODE) { /* GFX8 */
640 REG_UPDATE(GRPH_CONTROL,
641 GRPH_ARRAY_MODE, DC_SW_LINEAR);
644 if (dce_mi->masks->GRPH_ARRAY_MODE) { /* GFX6 but reuses gfx8 struct */
645 REG_UPDATE(GRPH_CONTROL,
646 GRPH_ARRAY_MODE, DC_SW_LINEAR);
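/*
 * Enable the graphics pipe and program tiling, size/rotation and, for
 * non-video formats, the pixel format.
 */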
650 static void dce_mi_program_surface_config(
651 struct mem_input *mi,
652 enum surface_pixel_format format,
653 struct dc_tiling_info *tiling_info,
654 struct plane_size *plane_size,
655 enum dc_rotation_angle rotation,
656 struct dc_plane_dcc_param *dcc,
657 bool horizontal_mirror)
659 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
660 REG_UPDATE(GRPH_ENABLE, GRPH_ENABLE, 1);
662 program_tiling(dce_mi, tiling_info);
663 program_size_and_rotation(dce_mi, rotation, plane_size);
665 if (format < SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
666 program_grph_pixel_format(dce_mi, format);
669 #if defined(CONFIG_DRM_AMD_DC_SI)
670 static void dce60_mi_program_surface_config(
671 struct mem_input *mi,
672 enum surface_pixel_format format,
673 struct dc_tiling_info *tiling_info,
674 struct plane_size *plane_size,
675 enum dc_rotation_angle rotation, /* not used in DCE6 */
676 struct dc_plane_dcc_param *dcc,
677 bool horizontal_mirror)
679 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
680 REG_UPDATE(GRPH_ENABLE, GRPH_ENABLE, 1);
682 program_tiling(dce_mi, tiling_info);
683 dce60_program_size(dce_mi, rotation, plane_size);
685 if (format < SURFACE_PIXEL_FORMAT_VIDEO_BEGIN)
686 program_grph_pixel_format(dce_mi, format);
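/*
 * Return twice the frame time in microseconds (at least 2 * 30 ms); used
 * below to bound the wait for DMIF buffer allocation.
 */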
690 static uint32_t get_dmif_switch_time_us(
693 uint32_t pix_clk_khz)
696 uint32_t pixels_per_second;
697 uint32_t pixels_per_frame;
698 uint32_t refresh_rate;
699 const uint32_t us_in_sec = 1000000;
700 const uint32_t min_single_frame_time_us = 30000;
701 /* return double of frame time */
702 const uint32_t single_frame_time_multiplier = 2;
704 if (!h_total || !v_total || !pix_clk_khz)
705 return single_frame_time_multiplier * min_single_frame_time_us;
707 /* TODO: should we use pixel format normalized pixel clock here? */
708 pixels_per_second = pix_clk_khz * 1000;
709 pixels_per_frame = h_total * v_total;
711 if (!pixels_per_second || !pixels_per_frame) {
712 /* avoid division by zero */
713 ASSERT(pixels_per_frame);
714 ASSERT(pixels_per_second);
715 return single_frame_time_multiplier * min_single_frame_time_us;
718 refresh_rate = pixels_per_second / pixels_per_frame;
721 /* avoid division by zero */
722 ASSERT(refresh_rate);
723 return single_frame_time_multiplier * min_single_frame_time_us;
726 frame_time = us_in_sec / refresh_rate;
728 if (frame_time < min_single_frame_time_us)
729 frame_time = min_single_frame_time_us;
731 frame_time *= single_frame_time_multiplier;
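/*
 * Allocate two DMIF buffers for this pipe and wait for completion; also
 * programs the pixel duration and the single-head rdreq limit workaround
 * when only one stream is active.
 */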
736 static void dce_mi_allocate_dmif(
737 struct mem_input *mi,
740 uint32_t pix_clk_khz,
741 uint32_t total_stream_num)
743 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
744 const uint32_t retry_delay = 10;
745 uint32_t retry_count = get_dmif_switch_time_us(
748 pix_clk_khz) / retry_delay;
751 uint32_t buffers_allocated;
752 uint32_t dmif_buffer_control;
754 dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
755 DMIF_BUFFERS_ALLOCATED, &buffers_allocated);
757 if (buffers_allocated == 2)
760 REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
761 DMIF_BUFFERS_ALLOCATED, 2);
763 REG_WAIT(DMIF_BUFFER_CONTROL,
764 DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
765 retry_delay, retry_count);
767 if (pix_clk_khz != 0) {
768 pix_dur = 1000000000ULL / pix_clk_khz;
770 REG_UPDATE(DPG_PIPE_ARBITRATION_CONTROL1,
771 PIXEL_DURATION, pix_dur);
774 if (dce_mi->wa.single_head_rdreq_dmif_limit) {
775 uint32_t enable = (total_stream_num > 1) ? 0 :
776 dce_mi->wa.single_head_rdreq_dmif_limit;
778 REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
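/*
 * Return this pipe's DMIF buffers (allocate 0), wait for completion and
 * update the rdreq limit workaround.
 */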
783 static void dce_mi_free_dmif(
784 struct mem_input *mi,
785 uint32_t total_stream_num)
787 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mi);
788 uint32_t buffers_allocated;
789 uint32_t dmif_buffer_control;
791 dmif_buffer_control = REG_GET(DMIF_BUFFER_CONTROL,
792 DMIF_BUFFERS_ALLOCATED, &buffers_allocated);
794 if (buffers_allocated == 0)
797 REG_SET(DMIF_BUFFER_CONTROL, dmif_buffer_control,
798 DMIF_BUFFERS_ALLOCATED, 0);
800 REG_WAIT(DMIF_BUFFER_CONTROL,
801 DMIF_BUFFERS_ALLOCATION_COMPLETED, 1,
804 if (dce_mi->wa.single_head_rdreq_dmif_limit) {
805 uint32_t enable = (total_stream_num > 1) ? 0 :
806 dce_mi->wa.single_head_rdreq_dmif_limit;
808 REG_UPDATE(MC_HUB_RDREQ_DMIF_LIMIT,
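/*
 * The surface address registers take the byte address shifted right by 8,
 * i.e. surfaces are expected to be 256-byte aligned.
 */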
814 static void program_sec_addr(
815 struct dce_mem_input *dce_mi,
816 PHYSICAL_ADDRESS_LOC address)
818 /* high register MUST be programmed first */
819 REG_SET(GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
820 GRPH_SECONDARY_SURFACE_ADDRESS_HIGH,
823 REG_SET_2(GRPH_SECONDARY_SURFACE_ADDRESS, 0,
824 GRPH_SECONDARY_SURFACE_ADDRESS, address.low_part >> 8,
825 GRPH_SECONDARY_DFQ_ENABLE, 0);
828 static void program_pri_addr(
829 struct dce_mem_input *dce_mi,
830 PHYSICAL_ADDRESS_LOC address)
832 /* high register MUST be programmed first */
833 REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
834 GRPH_PRIMARY_SURFACE_ADDRESS_HIGH,
837 REG_SET(GRPH_PRIMARY_SURFACE_ADDRESS, 0,
838 GRPH_PRIMARY_SURFACE_ADDRESS,
839 address.low_part >> 8);
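/*
 * A flip is pending while GRPH_SURFACE_UPDATE_PENDING is set; once it
 * clears, the requested address has taken effect and becomes the current
 * address.
 */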
843 static bool dce_mi_is_flip_pending(struct mem_input *mem_input)
845 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);
846 uint32_t update_pending;
848 REG_GET(GRPH_UPDATE, GRPH_SURFACE_UPDATE_PENDING, &update_pending);
852 mem_input->current_address = mem_input->request_address;
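/*
 * Program the new surface address under GRPH_UPDATE_LOCK; stereo surfaces
 * get both a primary (left) and a secondary (right) address.
 */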
856 static bool dce_mi_program_surface_flip_and_addr(
857 struct mem_input *mem_input,
858 const struct dc_plane_address *address,
861 struct dce_mem_input *dce_mi = TO_DCE_MEM_INPUT(mem_input);
863 REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 1);
867 GRPH_SURFACE_UPDATE_H_RETRACE_EN, flip_immediate ? 1 : 0);
869 switch (address->type) {
870 case PLN_ADDR_TYPE_GRAPHICS:
871 if (address->grph.addr.quad_part == 0)
873 program_pri_addr(dce_mi, address->grph.addr);
875 case PLN_ADDR_TYPE_GRPH_STEREO:
876 if (address->grph_stereo.left_addr.quad_part == 0 ||
877 address->grph_stereo.right_addr.quad_part == 0)
879 program_pri_addr(dce_mi, address->grph_stereo.left_addr);
880 program_sec_addr(dce_mi, address->grph_stereo.right_addr);
888 mem_input->request_address = *address;
891 mem_input->current_address = *address;
893 REG_UPDATE(GRPH_UPDATE, GRPH_UPDATE_LOCK, 0);
898 static const struct mem_input_funcs dce_mi_funcs = {
899 .mem_input_program_display_marks = dce_mi_program_display_marks,
900 .allocate_mem_input = dce_mi_allocate_dmif,
901 .free_mem_input = dce_mi_free_dmif,
902 .mem_input_program_surface_flip_and_addr =
903 dce_mi_program_surface_flip_and_addr,
904 .mem_input_program_pte_vm = dce_mi_program_pte_vm,
905 .mem_input_program_surface_config =
906 dce_mi_program_surface_config,
907 .mem_input_is_flip_pending = dce_mi_is_flip_pending,
908 .mem_input_clear_tiling = dce_mi_clear_tiling,
911 #if defined(CONFIG_DRM_AMD_DC_SI)
912 static const struct mem_input_funcs dce60_mi_funcs = {
913 .mem_input_program_display_marks = dce60_mi_program_display_marks,
914 .allocate_mem_input = dce_mi_allocate_dmif,
915 .free_mem_input = dce_mi_free_dmif,
916 .mem_input_program_surface_flip_and_addr =
917 dce_mi_program_surface_flip_and_addr,
918 .mem_input_program_pte_vm = dce_mi_program_pte_vm,
919 .mem_input_program_surface_config =
920 dce60_mi_program_surface_config,
921 .mem_input_is_flip_pending = dce_mi_is_flip_pending,
922 .mem_input_clear_tiling = dce_mi_clear_tiling,
926 static const struct mem_input_funcs dce112_mi_funcs = {
927 .mem_input_program_display_marks = dce112_mi_program_display_marks,
928 .allocate_mem_input = dce_mi_allocate_dmif,
929 .free_mem_input = dce_mi_free_dmif,
930 .mem_input_program_surface_flip_and_addr =
931 dce_mi_program_surface_flip_and_addr,
932 .mem_input_program_pte_vm = dce_mi_program_pte_vm,
933 .mem_input_program_surface_config =
934 dce_mi_program_surface_config,
935 .mem_input_is_flip_pending = dce_mi_is_flip_pending,
936 .mem_input_clear_tiling = dce_mi_clear_tiling,
939 static const struct mem_input_funcs dce120_mi_funcs = {
940 .mem_input_program_display_marks = dce120_mi_program_display_marks,
941 .allocate_mem_input = dce_mi_allocate_dmif,
942 .free_mem_input = dce_mi_free_dmif,
943 .mem_input_program_surface_flip_and_addr =
944 dce_mi_program_surface_flip_and_addr,
945 .mem_input_program_pte_vm = dce_mi_program_pte_vm,
946 .mem_input_program_surface_config =
947 dce_mi_program_surface_config,
948 .mem_input_is_flip_pending = dce_mi_is_flip_pending,
949 .mem_input_clear_tiling = dce_mi_clear_tiling,
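/*
 * Base constructor; the dce60/dce112/dce120 variants below reuse it and
 * only override the funcs table.
 */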
952 void dce_mem_input_construct(
953 struct dce_mem_input *dce_mi,
954 struct dc_context *ctx,
956 const struct dce_mem_input_registers *regs,
957 const struct dce_mem_input_shift *mi_shift,
958 const struct dce_mem_input_mask *mi_mask)
960 dce_mi->base.ctx = ctx;
962 dce_mi->base.inst = inst;
963 dce_mi->base.funcs = &dce_mi_funcs;
966 dce_mi->shifts = mi_shift;
967 dce_mi->masks = mi_mask;
970 #if defined(CONFIG_DRM_AMD_DC_SI)
971 void dce60_mem_input_construct(
972 struct dce_mem_input *dce_mi,
973 struct dc_context *ctx,
975 const struct dce_mem_input_registers *regs,
976 const struct dce_mem_input_shift *mi_shift,
977 const struct dce_mem_input_mask *mi_mask)
979 dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
980 dce_mi->base.funcs = &dce60_mi_funcs;
984 void dce112_mem_input_construct(
985 struct dce_mem_input *dce_mi,
986 struct dc_context *ctx,
988 const struct dce_mem_input_registers *regs,
989 const struct dce_mem_input_shift *mi_shift,
990 const struct dce_mem_input_mask *mi_mask)
992 dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
993 dce_mi->base.funcs = &dce112_mi_funcs;
996 void dce120_mem_input_construct(
997 struct dce_mem_input *dce_mi,
998 struct dc_context *ctx,
1000 const struct dce_mem_input_registers *regs,
1001 const struct dce_mem_input_shift *mi_shift,
1002 const struct dce_mem_input_mask *mi_mask)
1004 dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
1005 dce_mi->base.funcs = &dce120_mi_funcs;