/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */
#include <linux/slab.h>

#include "dal_asic_id.h"
#include "clk_mgr_internal.h"

#include "dce100/dce_clk_mgr.h"
#include "dce110/dce110_clk_mgr.h"
#include "dce112/dce112_clk_mgr.h"
#include "dce120/dce120_clk_mgr.h"
#include "dce60/dce60_clk_mgr.h"
#include "dcn10/rv1_clk_mgr.h"
#include "dcn10/rv2_clk_mgr.h"
#include "dcn20/dcn20_clk_mgr.h"
#include "dcn21/rn_clk_mgr.h"
#include "dcn201/dcn201_clk_mgr.h"
#include "dcn30/dcn30_clk_mgr.h"
#include "dcn301/vg_clk_mgr.h"
#include "dcn31/dcn31_clk_mgr.h"
#include "dcn314/dcn314_clk_mgr.h"
#include "dcn315/dcn315_clk_mgr.h"
#include "dcn316/dcn316_clk_mgr.h"
#include "dcn32/dcn32_clk_mgr.h"
#include "dcn35/dcn35_clk_mgr.h"
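
/* Count the displays that clock-management code must account for: streams
 * that are active (DPMS on) or virtual (headless workaround), excluding
 * SubVP phantom pipes.
 */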
int clk_mgr_helper_get_active_display_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, display_count = 0;

	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_state *stream = context->streams[i];

		/* Don't count SubVP phantom pipes as part of active
		 * display count
		 */
		if (stream->mall_stream_config.type == SUBVP_PHANTOM)
			continue;

		/*
		 * Only notify active stream or virtual stream.
		 * Need to notify virtual stream to work around
		 * headless case. HPD does not fire when system is in
		 * S0i2.
		 */
		if (!stream->dpms_off || stream->signal == SIGNAL_TYPE_VIRTUAL)
			display_count++;
	}

	return display_count;
}
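
/* Sum the plane counts of every stream in the context, active and
 * virtual alike.
 */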
int clk_mgr_helper_get_active_plane_cnt(
		struct dc *dc,
		struct dc_state *context)
{
	int i, total_plane_count = 0;

	for (i = 0; i < context->stream_count; i++) {
		const struct dc_stream_status stream_status = context->stream_status[i];

		/*
		 * Sum up plane_count for all streams (active and virtual).
		 */
		total_plane_count += stream_status.plane_count;
	}

	return total_plane_count;
}
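
/* Exit the optimized power state: let the HW sequencer undo its power
 * optimizations, then disable PSR and Replay on every eDP panel, caching
 * the PSR allow state so clk_mgr_optimize_pwr_state() can restore it.
 */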
void clk_mgr_exit_optimized_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	if (dc->hwss.exit_optimized_pwr_state)
		dc->hwss.exit_optimized_pwr_state(dc, dc->current_state);

	for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
		bool allow_active = false;

		edp_link = edp_links[panel_inst];
		if (!edp_link->psr_settings.psr_feature_enabled)
			continue;
		clk_mgr->psr_allow_active_cache = edp_link->psr_settings.psr_allow_active;
		dc->link_srv->edp_set_psr_allow_active(edp_link, &allow_active, false, false, NULL);
		dc->link_srv->edp_set_replay_allow_active(edp_link, &allow_active, false, false, NULL);
	}
}
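
/* Counterpart to clk_mgr_exit_optimized_pwr_state(): restore the cached
 * PSR/Replay allow state on every eDP panel, then let the HW sequencer
 * re-apply its power optimizations.
 */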
void clk_mgr_optimize_pwr_state(const struct dc *dc, struct clk_mgr *clk_mgr)
{
	struct dc_link *edp_links[MAX_NUM_EDP];
	struct dc_link *edp_link = NULL;
	int edp_num;
	unsigned int panel_inst;

	dc_get_edp_links(dc, edp_links, &edp_num);
	for (panel_inst = 0; panel_inst < edp_num; panel_inst++) {
		edp_link = edp_links[panel_inst];
		if (!edp_link->psr_settings.psr_feature_enabled)
			continue;
		dc->link_srv->edp_set_psr_allow_active(edp_link,
				&clk_mgr->psr_allow_active_cache, false, false, NULL);
		dc->link_srv->edp_set_replay_allow_active(edp_link,
				&clk_mgr->psr_allow_active_cache, false, false, NULL);
	}

	if (dc->hwss.optimize_pwr_state)
		dc->hwss.optimize_pwr_state(dc, dc->current_state);
}
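
/* Allocate and construct the clock manager matching the ASIC family and
 * revision; returns NULL on allocation failure or unknown ASIC. Typical
 * call site during DC construction (illustrative sketch, not verbatim):
 *
 *	dc->clk_mgr = dc_clk_mgr_create(dc->ctx,
 *			dc->res_pool->pp_smu, dc->res_pool->dccg);
 */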
struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
{
	struct hw_asic_id asic_id = ctx->asic_id;

	switch (asic_id.chip_family) {
#if defined(CONFIG_DRM_AMD_DC_SI)
	case FAMILY_SI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce60_clk_mgr_construct(ctx, clk_mgr);
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#endif
	case FAMILY_CI:
	case FAMILY_KV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_CZ: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dce110_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
	case FAMILY_VI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dce_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev)) {
			dce112_clk_mgr_construct(ctx, clk_mgr);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_AI: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
			dce121_clk_mgr_construct(ctx, clk_mgr);
		else
			dce120_clk_mgr_construct(ctx, clk_mgr);
		return &clk_mgr->base;
	}
#if defined(CONFIG_DRM_AMD_DC_FP)
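	/* Everything from DCN1.0 (FAMILY_RV) onward relies on the
	 * floating-point DML code, so these clock managers are compiled
	 * only when CONFIG_DRM_AMD_DC_FP is enabled.
	 */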
	case FAMILY_RV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_RENOIR(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_GREEN_SARDINE(asic_id.hw_internal_rev)) {
			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev)) {
			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_RAVEN(asic_id.hw_internal_rev) ||
				ASICREV_IS_PICASSO(asic_id.hw_internal_rev)) {
			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
			return &clk_mgr->base;
		}
		return &clk_mgr->base;
	}
	case FAMILY_NV: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		if (ASICREV_IS_SIENNA_CICHLID_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_DIMGREY_CAVEFISH_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (ASICREV_IS_BEIGE_GOBY_P(asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		if (asic_id.chip_id == DEVICE_ID_NV_13FE) {
			dcn201_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base;
		}
		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
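	/* The APU clock managers below wrap clk_mgr_internal in an
	 * ASIC-specific struct (e.g. struct clk_mgr_vgh), so they return
	 * &clk_mgr->base.base to reach the embedded struct clk_mgr.
	 */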
	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(asic_id.hw_internal_rev)) {
			struct clk_mgr_vgh *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

			if (clk_mgr == NULL) {
				BREAK_TO_DEBUGGER();
				return NULL;
			}
			vg_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
			return &clk_mgr->base.base;
		}
		break;
	case FAMILY_YELLOW_CARP: {
		struct clk_mgr_dcn31 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn31_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_6: {
		struct clk_mgr_dcn315 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn315_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_10_3_7: {
		struct clk_mgr_dcn316 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn316_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_11_0_0: {
		struct clk_mgr_internal *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn32_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base;
	}
	case AMDGPU_FAMILY_GC_11_0_1: {
		struct clk_mgr_dcn314 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn314_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
	case AMDGPU_FAMILY_GC_11_5_0: {
		struct clk_mgr_dcn35 *clk_mgr = kzalloc(sizeof(*clk_mgr), GFP_KERNEL);

		if (clk_mgr == NULL) {
			BREAK_TO_DEBUGGER();
			return NULL;
		}
		dcn35_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
		return &clk_mgr->base.base;
	}
#endif /* CONFIG_DRM_AMD_DC_FP - Family RV and above */
	default:
		ASSERT(0); /* Unknown Asic */
		break;
	}

	return NULL;
}
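
/* Tear down a clock manager created by dc_clk_mgr_create(): invoke the
 * family-specific destructor where one exists (DCN families), then free
 * the allocation.
 */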
void dc_destroy_clk_mgr(struct clk_mgr *clk_mgr_base)
{
	struct clk_mgr_internal *clk_mgr = TO_CLK_MGR_INTERNAL(clk_mgr_base);

#ifdef CONFIG_DRM_AMD_DC_FP
	switch (clk_mgr_base->ctx->asic_id.chip_family) {
	case FAMILY_NV:
		if (ASICREV_IS_SIENNA_CICHLID_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_DIMGREY_CAVEFISH_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		} else if (ASICREV_IS_BEIGE_GOBY_P(clk_mgr_base->ctx->asic_id.hw_internal_rev)) {
			dcn3_clk_mgr_destroy(clk_mgr);
		}
		break;

	case FAMILY_VGH:
		if (ASICREV_IS_VANGOGH(clk_mgr_base->ctx->asic_id.hw_internal_rev))
			vg_clk_mgr_destroy(clk_mgr);
		break;

	case FAMILY_YELLOW_CARP:
		dcn31_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_6:
		dcn315_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_10_3_7:
		dcn316_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_0:
		dcn32_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_0_1:
		dcn314_clk_mgr_destroy(clk_mgr);
		break;

	case AMDGPU_FAMILY_GC_11_5_0:
		dcn35_clk_mgr_destroy(clk_mgr);
		break;

	default:
		break;
	}
#endif /* CONFIG_DRM_AMD_DC_FP */

	kfree(clk_mgr);
}