]> Git Repo - linux.git/blob - drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
net: wan: Add framer framework support
[linux.git] / drivers / gpu / drm / amd / display / dc / clk_mgr / clk_mgr.c
1 /*
2  * Copyright 2012-16 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25
26 #include <linux/slab.h>
27
28 #include "dal_asic_id.h"
29 #include "dc_types.h"
30 #include "dccg.h"
31 #include "clk_mgr_internal.h"
32 #include "link.h"
33
34 #include "dce100/dce_clk_mgr.h"
35 #include "dce110/dce110_clk_mgr.h"
36 #include "dce112/dce112_clk_mgr.h"
37 #include "dce120/dce120_clk_mgr.h"
38 #include "dce60/dce60_clk_mgr.h"
39 #include "dcn10/rv1_clk_mgr.h"
40 #include "dcn10/rv2_clk_mgr.h"
41 #include "dcn20/dcn20_clk_mgr.h"
42 #include "dcn21/rn_clk_mgr.h"
43 #include "dcn201/dcn201_clk_mgr.h"
44 #include "dcn30/dcn30_clk_mgr.h"
45 #include "dcn301/vg_clk_mgr.h"
46 #include "dcn31/dcn31_clk_mgr.h"
47 #include "dcn314/dcn314_clk_mgr.h"
48 #include "dcn315/dcn315_clk_mgr.h"
49 #include "dcn316/dcn316_clk_mgr.h"
50 #include "dcn32/dcn32_clk_mgr.h"
51 #include "dcn35/dcn35_clk_mgr.h"
52
53 int clk_mgr_helper_get_active_display_cnt(
54                 struct dc *dc,
55                 struct dc_state *context)
56 {
57         int i, display_count;
58
59         display_count = 0;
60         for (i = 0; i < context->stream_count; i++) {
61                 const struct dc_stream_state *stream = context->streams[i];
62
63                 /* Don't count SubVP phantom pipes as part of active
64                  * display count
65                  */
66                 if (stream->mall_stream_config.type == SUBVP_PHANTOM)
67                         continue;
68
69                 /*
70                  * Only notify active stream or virtual stream.
71                  * Need to notify virtual stream to work around
72                  * headless case. HPD does not fire when system is in
73                  * S0i2.
74                  */
75                 if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
76                         display_count++;
77         }
78
79         return display_count;
80 }
81
82 int clk_mgr_helper_get_active_plane_cnt(
83                 struct dc *dc,
84                 struct dc_state *context)
85 {
86         int i, total_plane_count;
87
88         total_plane_count = 0;
89         for (i = 0; i < context->stream_count; i++) {
90                 const struct dc_stream_status stream_status = context->stream_status[i];
91
92                 /*
93                  * Sum up plane_count for all streams ( active and virtual ).
94                  */
95                 total_plane_count += stream_status.plane_count;
96         }
97
98         return total_plane_count;
99 }
100
101 void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
102 {
103         struct dc_link *edp_links[MAX_NUM_EDP];
104         struct dc_link *edp_link = NULL;
105         int edp_num;
106         unsigned int panel_inst;
107
108         dc_get_edp_links(dc, edp_links, &edp_num);
109         if (dc->hwss.exit_optimized_pwr_state)
110                 dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);
111
112         if (edp_num) {
113                 for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
114                         bool allow_active = false;
115
116                         edp_link = edp_links[panel_inst];
117                         if (!edp_link->psr_settings.psr_feature_enabled)
118                                 continue;
119                         clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
120                         dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
121                         dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
122                 }
123         }
124
125 }
126
127 void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
128 {
129         struct dc_link *edp_links[MAX_NUM_EDP];
130         struct dc_link *edp_link = NULL;
131         int edp_num;
132         unsigned int panel_inst;
133
134         dc_get_edp_links(dc, edp_links, &edp_num);
135         if (edp_num) {
136                 for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
137                         edp_link = edp_links[panel_inst];
138                         if (!edp_link->psr_settings.psr_feature_enabled)
139                                 continue;
140                         dc->link_srv->edp_set_psr_allow_active(edp_link,
141                                         &clk_mgr->psr_allow_active_cache, false, false, NULL);
142                         dc->link_srv->edp_set_replay_allow_active(edp_link,
143                                         &clk_mgr->psr_allow_active_cache, false, false, NULL);
144                 }
145         }
146
147         if (dc->hwss.optimize_pwr_state)
148                 dc->hwss.optimize_pwr_state(dc, dc->current_state);
149
150 }
151
152 struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
153 {
154         struct hw_asic_id asic_id = ctx->asic_id;
155
156         switch (asic_id.chip_family) {
157 #if defined(CONFIG_DRM_AMD_DC_SI)
158         case FAMILY_SI: {
159                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
160
161                 if (clk_mgr == NULL) {
162                         BREAK_TO_DEBUGGER();
163                         return NULL;
164                 }
165                 dce60_clk_mgr_construct(ctx, clk_mgr);
166                 dce_clk_mgr_construct(ctx, clk_mgr);
167                 return &clk_mgr->base;
168         }
169 #endif
170         case FAMILY_CI:
171         case FAMILY_KV: {
172                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
173
174                 if (clk_mgr == NULL) {
175                         BREAK_TO_DEBUGGER();
176                         return NULL;
177                 }
178                 dce_clk_mgr_construct(ctx, clk_mgr);
179                 return &clk_mgr->base;
180         }
181         case FAMILY_CZ: {
182                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
183
184                 if (clk_mgr == NULL) {
185                         BREAK_TO_DEBUGGER();
186                         return NULL;
187                 }
188                 dce110_clk_mgr_construct(ctx, clk_mgr);
189                 return &clk_mgr->base;
190         }
191         case FAMILY_VI: {
192                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
193
194                 if (clk_mgr == NULL) {
195                         BREAK_TO_DEBUGGER();
196                         return NULL;
197                 }
198                 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
199                                 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
200                         dce_clk_mgr_construct(ctx, clk_mgr);
201                         return &clk_mgr->base;
202                 }
203                 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
204                                 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
205                                 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
206                         dce112_clk_mgr_construct(ctx, clk_mgr);
207                         return &clk_mgr->base;
208                 }
209                 if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
210                         dce112_clk_mgr_construct(ctx, clk_mgr);
211                         return &clk_mgr->base;
212                 }
213                 return &clk_mgr->base;
214         }
215         case FAMILY_AI: {
216                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
217
218                 if (clk_mgr == NULL) {
219                         BREAK_TO_DEBUGGER();
220                         return NULL;
221                 }
222                 if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
223                         dce121_clk_mgr_construct(ctx, clk_mgr);
224                 else
225                         dce120_clk_mgr_construct(ctx, clk_mgr);
226                 return &clk_mgr->base;
227         }
228 #if defined(CONFIG_DRM_AMD_DC_FP)
229         case FAMILY_RV: {
230                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
231
232                 if (clk_mgr == NULL) {
233                         BREAK_TO_DEBUGGER();
234                         return NULL;
235                 }
236
237                 if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
238                         rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
239                         return &clk_mgr->base;
240                 }
241
242                 if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
243                         rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
244                         return &clk_mgr->base;
245                 }
246                 if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
247                         rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
248                         return &clk_mgr->base;
249                 }
250                 if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
251                                 ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
252                         rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
253                         return &clk_mgr->base;
254                 }
255                 return &clk_mgr->base;
256         }
257         case FAMILY_NV: {
258                 struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
259
260                 if (clk_mgr == NULL) {
261                         BREAK_TO_DEBUGGER();
262                         return NULL;
263                 }
264                 if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
265                         dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
266                         return &clk_mgr->base;
267                 }
268                 if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
269                         dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
270                         return &clk_mgr->base;
271                 }
272                 if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
273                         dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
274                         return &clk_mgr->base;
275                 }
276                 if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
277                         dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
278                         return &clk_mgr->base;
279                 }
280                 dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
281                 return &clk_mgr->base;
282         }
283         case FAMILY_VGH:
284                 if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
285                         struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
286
287                         if (clk_mgr == NULL) {
288                                 BREAK_TO_DEBUGGER();
289                                 return NULL;
290                         }
291                         vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
292                         return &clk_mgr->base.base;
293                 }
294                 break;
295
296         case FAMILY_YELLOW_CARP: {
297                 struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
298
299                 if (clk_mgr == NULL) {
300                         BREAK_TO_DEBUGGER();
301                         return NULL;
302                 }
303
304                 dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
305                 return &clk_mgr->base.base;
306         }
307                 break;
308         case AMDGPU_FAMILY_GC_10_3_6: {
309                 struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
310
311                 if (clk_mgr == NULL) {
312                         BREAK_TO_DEBUGGER();
313                         return NULL;
314                 }
315
316                 dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
317                 return &clk_mgr->base.base;
318         }
319                 break;
320         case AMDGPU_FAMILY_GC_10_3_7: {
321                 struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
322
323                 if (clk_mgr == NULL) {
324                         BREAK_TO_DEBUGGER();
325                         return NULL;
326                 }
327
328                 dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
329                 return &clk_mgr->base.base;
330         }
331                 break;
332         case AMDGPU_FAMILY_GC_11_0_0: {
333             struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
334
335             if (clk_mgr == NULL) {
336                 BREAK_TO_DEBUGGER();
337                 return NULL;
338             }
339
340             dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
341             return &clk_mgr->base;
342             break;
343         }
344
345         case AMDGPU_FAMILY_GC_11_0_1: {
346                 struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
347
348                 if (clk_mgr == NULL) {
349                         BREAK_TO_DEBUGGER();
350                         return NULL;
351                 }
352
353                 dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
354                 return &clk_mgr->base.base;
355         }
356         break;
357
358         case AMDGPU_FAMILY_GC_11_5_0: {
359                 struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);
360
361                 if (clk_mgr == NULL) {
362                         BREAK_TO_DEBUGGER();
363                         return NULL;
364                 }
365
366                 dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
367                 return &clk_mgr->base.base;
368         }
369         break;
370
371 #endif /* CONFIG_DRM_AMD_DC_FP - Family RV */
372         default:
373                 ASSERT(0); /* Unknown Asic */
374                 break;
375         }
376
377         return NULL;
378 }
379
/*
 * dc_destroy_clk_mgr() - Tear down a clock manager created by
 * dc_clk_mgr_create().
 *
 * Families whose construct path allocated additional state get their
 * family-specific destroy hook first; the wrapper allocation itself is
 * then released with the common kfree() at the end.  Families without a
 * destroy hook fall through to the plain kfree().
 */
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		/*
		 * NOTE(review): SIENNA_CICHLID / DIMGREY_CAVEFISH use an
		 * if/else-if chain but BEIGE_GOBY is a separate `if`.  If the
		 * ASICREV range macros overlap, a Beige Goby rev could run
		 * dcn3_clk_mgr_destroy() twice — confirm the macro ranges
		 * before restructuring.
		 */
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	/* Frees the wrapper allocated in dc_clk_mgr_create(). */
	kfree(clk_mgr);
}
433
This page took 0.057606 seconds and 4 git commands to generate.