// SPDX-License-Identifier: MIT
/*
 * Copyright © 2023 Intel Corporation
 */

#include <linux/log2.h>
#include <linux/math64.h>

#include "intel_cx0_phy.h"
#include "intel_cx0_phy_regs.h"
#include "intel_ddi.h"
#include "intel_ddi_buf_trans.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_hdmi.h"
#include "intel_panel.h"
#include "intel_psr.h"
#include "intel_tc.h"

#define MB_WRITE_COMMITTED	true
#define MB_WRITE_UNCOMMITTED	false
#define for_each_cx0_lane_in_mask(__lane_mask, __lane) \
	for ((__lane) = 0; (__lane) < 2; (__lane)++) \
		for_each_if((__lane_mask) & BIT(__lane))

#define INTEL_CX0_LANE0		BIT(0)
#define INTEL_CX0_LANE1		BIT(1)
#define INTEL_CX0_BOTH_LANES	(INTEL_CX0_LANE1 | INTEL_CX0_LANE0)
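
/*
 * Illustrative usage sketch (not part of the driver flow): walking the lanes
 * selected by a mask. The mask and iterator come from the definitions above;
 * the body shown is only a placeholder for a per-lane operation.
 *
 *	u8 lane_mask = INTEL_CX0_BOTH_LANES;
 *	int lane;
 *
 *	for_each_cx0_lane_in_mask(lane_mask, lane)
 *		pr_debug("operating on PHY lane %d\n", lane);
 */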
bool intel_is_c10phy(struct drm_i915_private *i915, enum phy phy)
{
	if (DISPLAY_VER_FULL(i915) == IP_VER(14, 0) && phy < PHY_C)
		return true;

	return false;
}

static int lane_mask_to_lane(u8 lane_mask)
{
	if (WARN_ON((lane_mask & ~INTEL_CX0_BOTH_LANES) ||
		    hweight8(lane_mask) != 1))
		return 0;

	return ilog2(lane_mask);
}
49 static u8 intel_cx0_get_owned_lane_mask(struct drm_i915_private *i915,
50 struct intel_encoder *encoder)
52 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
54 if (!intel_tc_port_in_dp_alt_mode(dig_port))
55 return INTEL_CX0_BOTH_LANES;
58 * In DP-alt with pin assignment D, only PHY lane 0 is owned
59 * by display and lane 1 is owned by USB.
61 return intel_tc_port_max_lane_count(dig_port) > 2
62 ? INTEL_CX0_BOTH_LANES : INTEL_CX0_LANE0;
static void assert_dc_off(struct drm_i915_private *i915)
{
	bool enabled;

	enabled = intel_display_power_is_enabled(i915, POWER_DOMAIN_DC_OFF);
	drm_WARN_ON(&i915->drm, !enabled);
}
static void intel_cx0_program_msgbus_timer(struct intel_encoder *encoder)
{
	int lane;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);

	for_each_cx0_lane_in_mask(INTEL_CX0_BOTH_LANES, lane)
		intel_de_rmw(i915,
			     XELPDP_PORT_MSGBUS_TIMER(encoder->port, lane),
			     XELPDP_PORT_MSGBUS_TIMER_VAL_MASK,
			     XELPDP_PORT_MSGBUS_TIMER_VAL);
}
/*
 * Prepare HW for CX0 phy transactions.
 *
 * It is required that PSR and DC5/6 are disabled before any CX0 message
 * bus transaction is executed.
 *
 * We also do the msgbus timer programming here to ensure that the timer
 * is already programmed before any access to the msgbus.
 */
static intel_wakeref_t intel_cx0_phy_transaction_begin(struct intel_encoder *encoder)
{
	intel_wakeref_t wakeref;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_psr_pause(intel_dp);
	wakeref = intel_display_power_get(i915, POWER_DOMAIN_DC_OFF);
	intel_cx0_program_msgbus_timer(encoder);

	return wakeref;
}

static void intel_cx0_phy_transaction_end(struct intel_encoder *encoder, intel_wakeref_t wakeref)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_psr_resume(intel_dp);
	intel_display_power_put(i915, POWER_DOMAIN_DC_OFF, wakeref);
}
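
/*
 * Rough usage pattern for the helpers above (a sketch mirroring how callers
 * later in this file use them): every burst of message bus accesses is
 * bracketed by a begin/end pair, so PSR and DC5/6 stay disabled and the
 * msgbus timer is programmed before the first access.
 *
 *	intel_wakeref_t wakeref;
 *
 *	wakeref = intel_cx0_phy_transaction_begin(encoder);
 *	... intel_cx0_read()/intel_cx0_write()/intel_cx0_rmw() calls ...
 *	intel_cx0_phy_transaction_end(encoder, wakeref);
 */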
static void intel_clear_response_ready_flag(struct drm_i915_private *i915,
					    enum port port, int lane)
{
	intel_de_rmw(i915, XELPDP_PORT_P2M_MSGBUS_STATUS(port, lane),
		     0, XELPDP_PORT_P2M_RESPONSE_READY | XELPDP_PORT_P2M_ERROR_SET);
}

static void intel_cx0_bus_reset(struct drm_i915_private *i915, enum port port, int lane)
{
	enum phy phy = intel_port_to_phy(i915, port);

	intel_de_write(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
		       XELPDP_PORT_M2P_TRANSACTION_RESET);

	if (intel_de_wait_for_clear(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
				    XELPDP_PORT_M2P_TRANSACTION_RESET,
				    XELPDP_MSGBUS_TIMEOUT_SLOW)) {
		drm_err_once(&i915->drm, "Failed to bring PHY %c to idle.\n", phy_name(phy));
		return;
	}

	intel_clear_response_ready_flag(i915, port, lane);
}
static int intel_cx0_wait_for_ack(struct drm_i915_private *i915, enum port port,
				  int command, int lane, u32 *val)
{
	enum phy phy = intel_port_to_phy(i915, port);

	if (__intel_de_wait_for_register(i915,
					 XELPDP_PORT_P2M_MSGBUS_STATUS(port, lane),
					 XELPDP_PORT_P2M_RESPONSE_READY,
					 XELPDP_PORT_P2M_RESPONSE_READY,
					 XELPDP_MSGBUS_TIMEOUT_FAST_US,
					 XELPDP_MSGBUS_TIMEOUT_SLOW, val)) {
		drm_dbg_kms(&i915->drm, "PHY %c Timeout waiting for message ACK. Status: 0x%x\n",
			    phy_name(phy), *val);

		if (!(intel_de_read(i915, XELPDP_PORT_MSGBUS_TIMER(port, lane)) &
		      XELPDP_PORT_MSGBUS_TIMER_TIMED_OUT))
			drm_dbg_kms(&i915->drm,
				    "PHY %c Hardware did not detect a timeout\n",
				    phy_name(phy));

		intel_cx0_bus_reset(i915, port, lane);
		return -ETIMEDOUT;
	}

	if (*val & XELPDP_PORT_P2M_ERROR_SET) {
		drm_dbg_kms(&i915->drm, "PHY %c Error occurred during %s command. Status: 0x%x\n", phy_name(phy),
			    command == XELPDP_PORT_P2M_COMMAND_READ_ACK ? "read" : "write", *val);
		intel_cx0_bus_reset(i915, port, lane);
		return -EINVAL;
	}

	if (REG_FIELD_GET(XELPDP_PORT_P2M_COMMAND_TYPE_MASK, *val) != command) {
		drm_dbg_kms(&i915->drm, "PHY %c Not a %s response. MSGBUS Status: 0x%x.\n", phy_name(phy),
			    command == XELPDP_PORT_P2M_COMMAND_READ_ACK ? "read" : "write", *val);
		intel_cx0_bus_reset(i915, port, lane);
		return -EINVAL;
	}

	return 0;
}
static int __intel_cx0_read_once(struct drm_i915_private *i915, enum port port,
				 int lane, u16 addr)
{
	enum phy phy = intel_port_to_phy(i915, port);
	int ack;
	u32 val;

	if (intel_de_wait_for_clear(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
				    XELPDP_PORT_M2P_TRANSACTION_PENDING,
				    XELPDP_MSGBUS_TIMEOUT_SLOW)) {
		drm_dbg_kms(&i915->drm,
			    "PHY %c Timeout waiting for previous transaction to complete. Reset the bus and retry.\n", phy_name(phy));
		intel_cx0_bus_reset(i915, port, lane);
		return -ETIMEDOUT;
	}

	intel_de_write(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
		       XELPDP_PORT_M2P_TRANSACTION_PENDING |
		       XELPDP_PORT_M2P_COMMAND_READ |
		       XELPDP_PORT_M2P_ADDRESS(addr));

	ack = intel_cx0_wait_for_ack(i915, port, XELPDP_PORT_P2M_COMMAND_READ_ACK, lane, &val);
	if (ack < 0)
		return ack;

	intel_clear_response_ready_flag(i915, port, lane);

	return REG_FIELD_GET(XELPDP_PORT_P2M_DATA_MASK, val);
}
static u8 __intel_cx0_read(struct drm_i915_private *i915, enum port port,
			   int lane, u16 addr)
{
	enum phy phy = intel_port_to_phy(i915, port);
	int i, status;

	assert_dc_off(i915);

	/* 3 tries is assumed to be enough to read successfully */
	for (i = 0; i < 3; i++) {
		status = __intel_cx0_read_once(i915, port, lane, addr);
		if (status >= 0)
			return status;
	}

	drm_err_once(&i915->drm, "PHY %c Read %04x failed after %d retries.\n",
		     phy_name(phy), addr, i);

	return 0;
}

static u8 intel_cx0_read(struct drm_i915_private *i915, enum port port,
			 u8 lane_mask, u16 addr)
{
	int lane = lane_mask_to_lane(lane_mask);

	return __intel_cx0_read(i915, port, lane, addr);
}
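
/*
 * Summary of a single read transaction as implemented above (descriptive
 * only): the M2P control register is armed with COMMAND_READ plus the VDR
 * address, the P2M status register is polled for RESPONSE_READY, the 8-bit
 * payload is extracted from the status word with REG_FIELD_GET(), and the
 * ready flag is cleared for the next transaction. Any timeout or error
 * resets the bus, and the wrapper retries up to three times.
 */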
static int __intel_cx0_write_once(struct drm_i915_private *i915, enum port port,
				  int lane, u16 addr, u8 data, bool committed)
{
	enum phy phy = intel_port_to_phy(i915, port);
	int ack;
	u32 val;

	if (intel_de_wait_for_clear(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
				    XELPDP_PORT_M2P_TRANSACTION_PENDING,
				    XELPDP_MSGBUS_TIMEOUT_SLOW)) {
		drm_dbg_kms(&i915->drm,
			    "PHY %c Timeout waiting for previous transaction to complete. Resetting the bus.\n", phy_name(phy));
		intel_cx0_bus_reset(i915, port, lane);
		return -ETIMEDOUT;
	}

	intel_de_write(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
		       XELPDP_PORT_M2P_TRANSACTION_PENDING |
		       (committed ? XELPDP_PORT_M2P_COMMAND_WRITE_COMMITTED :
				    XELPDP_PORT_M2P_COMMAND_WRITE_UNCOMMITTED) |
		       XELPDP_PORT_M2P_DATA(data) |
		       XELPDP_PORT_M2P_ADDRESS(addr));

	if (intel_de_wait_for_clear(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
				    XELPDP_PORT_M2P_TRANSACTION_PENDING,
				    XELPDP_MSGBUS_TIMEOUT_SLOW)) {
		drm_dbg_kms(&i915->drm,
			    "PHY %c Timeout waiting for write to complete. Resetting the bus.\n", phy_name(phy));
		intel_cx0_bus_reset(i915, port, lane);
		return -ETIMEDOUT;
	}

	if (committed) {
		ack = intel_cx0_wait_for_ack(i915, port, XELPDP_PORT_P2M_COMMAND_WRITE_ACK, lane, &val);
		if (ack < 0)
			return ack;
	} else if ((intel_de_read(i915, XELPDP_PORT_P2M_MSGBUS_STATUS(port, lane)) &
		    XELPDP_PORT_P2M_ERROR_SET)) {
		drm_dbg_kms(&i915->drm,
			    "PHY %c Error occurred during write command.\n", phy_name(phy));
		intel_cx0_bus_reset(i915, port, lane);
		return -EINVAL;
	}

	intel_clear_response_ready_flag(i915, port, lane);

	return 0;
}
static void __intel_cx0_write(struct drm_i915_private *i915, enum port port,
			      int lane, u16 addr, u8 data, bool committed)
{
	enum phy phy = intel_port_to_phy(i915, port);
	int i, status;

	assert_dc_off(i915);

	/* 3 tries is assumed to be enough to write successfully */
	for (i = 0; i < 3; i++) {
		status = __intel_cx0_write_once(i915, port, lane, addr, data, committed);
		if (status == 0)
			return;
	}

	drm_err_once(&i915->drm,
		     "PHY %c Write %04x failed after %d retries.\n", phy_name(phy), addr, i);
}

static void intel_cx0_write(struct drm_i915_private *i915, enum port port,
			    u8 lane_mask, u16 addr, u8 data, bool committed)
{
	int lane;

	for_each_cx0_lane_in_mask(lane_mask, lane)
		__intel_cx0_write(i915, port, lane, addr, data, committed);
}
static void intel_c20_sram_write(struct drm_i915_private *i915, enum port port,
				 int lane, u16 addr, u16 data)
{
	assert_dc_off(i915);

	intel_cx0_write(i915, port, lane, PHY_C20_WR_ADDRESS_H, addr >> 8, 0);
	intel_cx0_write(i915, port, lane, PHY_C20_WR_ADDRESS_L, addr & 0xff, 0);

	intel_cx0_write(i915, port, lane, PHY_C20_WR_DATA_H, data >> 8, 0);
	intel_cx0_write(i915, port, lane, PHY_C20_WR_DATA_L, data & 0xff, 1);
}

static u16 intel_c20_sram_read(struct drm_i915_private *i915, enum port port,
			       int lane, u16 addr)
{
	u16 val;

	assert_dc_off(i915);

	intel_cx0_write(i915, port, lane, PHY_C20_RD_ADDRESS_H, addr >> 8, 0);
	intel_cx0_write(i915, port, lane, PHY_C20_RD_ADDRESS_L, addr & 0xff, 1);

	val = intel_cx0_read(i915, port, lane, PHY_C20_RD_DATA_H);
	val <<= 8;
	val |= intel_cx0_read(i915, port, lane, PHY_C20_RD_DATA_L);

	return val;
}
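
/*
 * Example of the indirect C20 SRAM access sequence above (a sketch; the
 * SRAM address 0x0123 is chosen purely for illustration): a 16-bit read
 * expands into four message bus transactions, with only the second address
 * write committed.
 *
 *	intel_cx0_write(i915, port, lane, PHY_C20_RD_ADDRESS_H, 0x01, 0);
 *	intel_cx0_write(i915, port, lane, PHY_C20_RD_ADDRESS_L, 0x23, 1);
 *	hi = intel_cx0_read(i915, port, lane, PHY_C20_RD_DATA_H);
 *	lo = intel_cx0_read(i915, port, lane, PHY_C20_RD_DATA_L);
 *	val = hi << 8 | lo;
 */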
static void __intel_cx0_rmw(struct drm_i915_private *i915, enum port port,
			    int lane, u16 addr, u8 clear, u8 set, bool committed)
{
	u8 old, val;

	old = __intel_cx0_read(i915, port, lane, addr);
	val = (old & ~clear) | set;

	if (val != old)
		__intel_cx0_write(i915, port, lane, addr, val, committed);
}

static void intel_cx0_rmw(struct drm_i915_private *i915, enum port port,
			  u8 lane_mask, u16 addr, u8 clear, u8 set, bool committed)
{
	u8 lane;

	for_each_cx0_lane_in_mask(lane_mask, lane)
		__intel_cx0_rmw(i915, port, lane, addr, clear, set, committed);
}
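
/*
 * Minimal usage sketch for the RMW helper above: set a field on both lanes
 * without disturbing the other bits of a VDR register. The register and
 * field names are simply the ones already used later in this file.
 *
 *	intel_cx0_rmw(i915, encoder->port, INTEL_CX0_BOTH_LANES,
 *		      PHY_C10_VDR_CONTROL(1), 0, C10_VDR_CTRL_MSGBUS_ACCESS,
 *		      MB_WRITE_COMMITTED);
 */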
370 static u8 intel_c10_get_tx_vboost_lvl(const struct intel_crtc_state *crtc_state)
372 if (intel_crtc_has_dp_encoder(crtc_state)) {
373 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
374 (crtc_state->port_clock == 540000 ||
375 crtc_state->port_clock == 810000))
384 static u8 intel_c10_get_tx_term_ctl(const struct intel_crtc_state *crtc_state)
386 if (intel_crtc_has_dp_encoder(crtc_state)) {
387 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
388 (crtc_state->port_clock == 540000 ||
389 crtc_state->port_clock == 810000))
398 void intel_cx0_phy_set_signal_levels(struct intel_encoder *encoder,
399 const struct intel_crtc_state *crtc_state)
401 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
402 const struct intel_ddi_buf_trans *trans;
403 enum phy phy = intel_port_to_phy(i915, encoder->port);
404 u8 owned_lane_mask = intel_cx0_get_owned_lane_mask(i915, encoder);
405 intel_wakeref_t wakeref;
408 wakeref = intel_cx0_phy_transaction_begin(encoder);
410 trans = encoder->get_buf_trans(encoder, crtc_state, &n_entries);
411 if (drm_WARN_ON_ONCE(&i915->drm, !trans)) {
412 intel_cx0_phy_transaction_end(encoder, wakeref);
416 if (intel_is_c10phy(i915, phy)) {
417 intel_cx0_rmw(i915, encoder->port, owned_lane_mask, PHY_C10_VDR_CONTROL(1),
418 0, C10_VDR_CTRL_MSGBUS_ACCESS, MB_WRITE_COMMITTED);
419 intel_cx0_rmw(i915, encoder->port, owned_lane_mask, PHY_C10_VDR_CMN(3),
420 C10_CMN3_TXVBOOST_MASK,
421 C10_CMN3_TXVBOOST(intel_c10_get_tx_vboost_lvl(crtc_state)),
422 MB_WRITE_UNCOMMITTED);
423 intel_cx0_rmw(i915, encoder->port, owned_lane_mask, PHY_C10_VDR_TX(1),
424 C10_TX1_TERMCTL_MASK,
425 C10_TX1_TERMCTL(intel_c10_get_tx_term_ctl(crtc_state)),
429 for (ln = 0; ln < crtc_state->lane_count; ln++) {
430 int level = intel_ddi_level(encoder, crtc_state, ln);
433 u8 lane_mask = lane == 0 ? INTEL_CX0_LANE0 : INTEL_CX0_LANE1;
435 if (!(lane_mask & owned_lane_mask))
438 intel_cx0_rmw(i915, encoder->port, lane_mask, PHY_CX0_VDROVRD_CTL(lane, tx, 0),
439 C10_PHY_OVRD_LEVEL_MASK,
440 C10_PHY_OVRD_LEVEL(trans->entries[level].snps.pre_cursor),
442 intel_cx0_rmw(i915, encoder->port, lane_mask, PHY_CX0_VDROVRD_CTL(lane, tx, 1),
443 C10_PHY_OVRD_LEVEL_MASK,
444 C10_PHY_OVRD_LEVEL(trans->entries[level].snps.vswing),
446 intel_cx0_rmw(i915, encoder->port, lane_mask, PHY_CX0_VDROVRD_CTL(lane, tx, 2),
447 C10_PHY_OVRD_LEVEL_MASK,
448 C10_PHY_OVRD_LEVEL(trans->entries[level].snps.post_cursor),
			      MB_WRITE_COMMITTED);
	}

	/* Write Override enables in 0xD71 */
	intel_cx0_rmw(i915, encoder->port, owned_lane_mask, PHY_C10_VDR_OVRD,
		      0, PHY_C10_VDR_OVRD_TX1 | PHY_C10_VDR_OVRD_TX2,
		      MB_WRITE_COMMITTED);

	if (intel_is_c10phy(i915, phy))
		intel_cx0_rmw(i915, encoder->port, owned_lane_mask, PHY_C10_VDR_CONTROL(1),
			      0, C10_VDR_CTRL_UPDATE_CFG, MB_WRITE_COMMITTED);

	intel_cx0_phy_transaction_end(encoder, wakeref);
}
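
/*
 * Note on the per-lane programming loop above (descriptive only): each
 * message bus lane serves two transmitters, which is why the override
 * registers are indexed as PHY_CX0_VDROVRD_CTL(lane, tx, ctrl). For every
 * data lane in lane_count, the pre-cursor, vswing and post-cursor levels
 * from the buf_trans table are written to control slots 0, 1 and 2 of the
 * matching transmitter, skipping lanes not owned by display in DP-alt mode.
 */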
/*
 * Basic DP link rates with 38.4 MHz reference clock.
 * Note: The tables below are with SSC. In non-ssc
 * registers 0xC04 to 0xC08 (pll[4] to pll[8]) will be
 * programmed 0.
 */
471 static const struct intel_c10pll_state mtl_c10_dp_rbr = {
497 static const struct intel_c10pll_state mtl_c10_edp_r216 = {
523 static const struct intel_c10pll_state mtl_c10_edp_r243 = {
549 static const struct intel_c10pll_state mtl_c10_dp_hbr1 = {
561 .pll[8] = 0x1, /* Verify */
575 static const struct intel_c10pll_state mtl_c10_edp_r324 = {
601 static const struct intel_c10pll_state mtl_c10_edp_r432 = {
627 static const struct intel_c10pll_state mtl_c10_dp_hbr2 = {
653 static const struct intel_c10pll_state mtl_c10_edp_r675 = {
679 static const struct intel_c10pll_state mtl_c10_dp_hbr3 = {
705 static const struct intel_c10pll_state * const mtl_c10_dp_tables[] = {
713 static const struct intel_c10pll_state * const mtl_c10_edp_tables[] = {
726 /* C20 basic DP 1.4 tables */
727 static const struct intel_c20pll_state mtl_c20_dp_rbr = {
728 .link_bit_rate = 162000,
730 .tx = { 0xbe88, /* tx cfg0 */
731 0x5800, /* tx cfg1 */
732 0x0000, /* tx cfg2 */
734 .cmn = {0x0500, /* cmn cfg0*/
735 0x0005, /* cmn cfg1 */
736 0x0000, /* cmn cfg2 */
737 0x0000, /* cmn cfg3 */
739 .mpllb = { 0x50a8, /* mpllb cfg0 */
740 0x2120, /* mpllb cfg1 */
741 0xcd9a, /* mpllb cfg2 */
742 0xbfc1, /* mpllb cfg3 */
743 0x5ab8, /* mpllb cfg4 */
744 0x4c34, /* mpllb cfg5 */
745 0x2000, /* mpllb cfg6 */
746 0x0001, /* mpllb cfg7 */
747 0x6000, /* mpllb cfg8 */
748 0x0000, /* mpllb cfg9 */
749 0x0000, /* mpllb cfg10 */
753 static const struct intel_c20pll_state mtl_c20_dp_hbr1 = {
754 .link_bit_rate = 270000,
756 .tx = { 0xbe88, /* tx cfg0 */
757 0x4800, /* tx cfg1 */
758 0x0000, /* tx cfg2 */
760 .cmn = {0x0500, /* cmn cfg0*/
761 0x0005, /* cmn cfg1 */
762 0x0000, /* cmn cfg2 */
763 0x0000, /* cmn cfg3 */
765 .mpllb = { 0x308c, /* mpllb cfg0 */
766 0x2110, /* mpllb cfg1 */
767 0xcc9c, /* mpllb cfg2 */
768 0xbfc1, /* mpllb cfg3 */
769 0x4b9a, /* mpllb cfg4 */
770 0x3f81, /* mpllb cfg5 */
771 0x2000, /* mpllb cfg6 */
772 0x0001, /* mpllb cfg7 */
773 0x5000, /* mpllb cfg8 */
774 0x0000, /* mpllb cfg9 */
775 0x0000, /* mpllb cfg10 */
779 static const struct intel_c20pll_state mtl_c20_dp_hbr2 = {
780 .link_bit_rate = 540000,
782 .tx = { 0xbe88, /* tx cfg0 */
783 0x4800, /* tx cfg1 */
784 0x0000, /* tx cfg2 */
786 .cmn = {0x0500, /* cmn cfg0*/
787 0x0005, /* cmn cfg1 */
788 0x0000, /* cmn cfg2 */
789 0x0000, /* cmn cfg3 */
791 .mpllb = { 0x108c, /* mpllb cfg0 */
792 0x2108, /* mpllb cfg1 */
793 0xcc9c, /* mpllb cfg2 */
794 0xbfc1, /* mpllb cfg3 */
795 0x4b9a, /* mpllb cfg4 */
796 0x3f81, /* mpllb cfg5 */
797 0x2000, /* mpllb cfg6 */
798 0x0001, /* mpllb cfg7 */
799 0x5000, /* mpllb cfg8 */
800 0x0000, /* mpllb cfg9 */
801 0x0000, /* mpllb cfg10 */
805 static const struct intel_c20pll_state mtl_c20_dp_hbr3 = {
806 .link_bit_rate = 810000,
808 .tx = { 0xbe88, /* tx cfg0 */
809 0x4800, /* tx cfg1 */
810 0x0000, /* tx cfg2 */
812 .cmn = {0x0500, /* cmn cfg0*/
813 0x0005, /* cmn cfg1 */
814 0x0000, /* cmn cfg2 */
815 0x0000, /* cmn cfg3 */
817 .mpllb = { 0x10d2, /* mpllb cfg0 */
818 0x2108, /* mpllb cfg1 */
819 0x8d98, /* mpllb cfg2 */
820 0xbfc1, /* mpllb cfg3 */
821 0x7166, /* mpllb cfg4 */
822 0x5f42, /* mpllb cfg5 */
823 0x2000, /* mpllb cfg6 */
824 0x0001, /* mpllb cfg7 */
825 0x7800, /* mpllb cfg8 */
826 0x0000, /* mpllb cfg9 */
827 0x0000, /* mpllb cfg10 */
831 /* C20 basic DP 2.0 tables */
832 static const struct intel_c20pll_state mtl_c20_dp_uhbr10 = {
833 .link_bit_rate = 1000000, /* 10 Gbps */
835 .tx = { 0xbe21, /* tx cfg0 */
836 0x4800, /* tx cfg1 */
837 0x0000, /* tx cfg2 */
839 .cmn = {0x0500, /* cmn cfg0*/
840 0x0005, /* cmn cfg1 */
841 0x0000, /* cmn cfg2 */
842 0x0000, /* cmn cfg3 */
844 .mplla = { 0x3104, /* mplla cfg0 */
845 0xd105, /* mplla cfg1 */
846 0xc025, /* mplla cfg2 */
847 0xc025, /* mplla cfg3 */
848 0x8c00, /* mplla cfg4 */
849 0x759a, /* mplla cfg5 */
850 0x4000, /* mplla cfg6 */
851 0x0003, /* mplla cfg7 */
852 0x3555, /* mplla cfg8 */
853 0x0001, /* mplla cfg9 */
857 static const struct intel_c20pll_state mtl_c20_dp_uhbr13_5 = {
858 .link_bit_rate = 1350000, /* 13.5 Gbps */
860 .tx = { 0xbea0, /* tx cfg0 */
861 0x4800, /* tx cfg1 */
862 0x0000, /* tx cfg2 */
864 .cmn = {0x0500, /* cmn cfg0*/
865 0x0005, /* cmn cfg1 */
866 0x0000, /* cmn cfg2 */
867 0x0000, /* cmn cfg3 */
869 .mpllb = { 0x015f, /* mpllb cfg0 */
870 0x2205, /* mpllb cfg1 */
871 0x1b17, /* mpllb cfg2 */
872 0xffc1, /* mpllb cfg3 */
873 0xe100, /* mpllb cfg4 */
874 0xbd00, /* mpllb cfg5 */
875 0x2000, /* mpllb cfg6 */
876 0x0001, /* mpllb cfg7 */
877 0x4800, /* mpllb cfg8 */
878 0x0000, /* mpllb cfg9 */
879 0x0000, /* mpllb cfg10 */
883 static const struct intel_c20pll_state mtl_c20_dp_uhbr20 = {
884 .link_bit_rate = 2000000, /* 20 Gbps */
886 .tx = { 0xbe20, /* tx cfg0 */
887 0x4800, /* tx cfg1 */
888 0x0000, /* tx cfg2 */
890 .cmn = {0x0500, /* cmn cfg0*/
891 0x0005, /* cmn cfg1 */
892 0x0000, /* cmn cfg2 */
893 0x0000, /* cmn cfg3 */
895 .mplla = { 0x3104, /* mplla cfg0 */
896 0xd105, /* mplla cfg1 */
897 0xc025, /* mplla cfg2 */
898 0xc025, /* mplla cfg3 */
899 0xa6ab, /* mplla cfg4 */
900 0x8c00, /* mplla cfg5 */
901 0x4000, /* mplla cfg6 */
902 0x0003, /* mplla cfg7 */
903 0x3555, /* mplla cfg8 */
904 0x0001, /* mplla cfg9 */
908 static const struct intel_c20pll_state * const mtl_c20_dp_tables[] = {
914 &mtl_c20_dp_uhbr13_5,
/*
 * HDMI link rates with 38.4 MHz reference clock.
 */
923 static const struct intel_c10pll_state mtl_c10_hdmi_25_2 = {
949 static const struct intel_c10pll_state mtl_c10_hdmi_27_0 = {
975 static const struct intel_c10pll_state mtl_c10_hdmi_74_25 = {
1001 static const struct intel_c10pll_state mtl_c10_hdmi_148_5 = {
1027 static const struct intel_c10pll_state mtl_c10_hdmi_594 = {
1053 /* Precomputed C10 HDMI PLL tables */
1054 static const struct intel_c10pll_state mtl_c10_hdmi_27027 = {
1058 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xC0, .pll[3] = 0x00, .pll[4] = 0x00,
1059 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1060 .pll[10] = 0xFF, .pll[11] = 0xCC, .pll[12] = 0x9C, .pll[13] = 0xCB, .pll[14] = 0xCC,
1061 .pll[15] = 0x0D, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1064 static const struct intel_c10pll_state mtl_c10_hdmi_28320 = {
1068 .pll[0] = 0x04, .pll[1] = 0x00, .pll[2] = 0xCC, .pll[3] = 0x00, .pll[4] = 0x00,
1069 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1070 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x00, .pll[13] = 0x00, .pll[14] = 0x00,
1071 .pll[15] = 0x0D, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1074 static const struct intel_c10pll_state mtl_c10_hdmi_30240 = {
1078 .pll[0] = 0x04, .pll[1] = 0x00, .pll[2] = 0xDC, .pll[3] = 0x00, .pll[4] = 0x00,
1079 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1080 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x00, .pll[13] = 0x00, .pll[14] = 0x00,
1081 .pll[15] = 0x0D, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1084 static const struct intel_c10pll_state mtl_c10_hdmi_31500 = {
1088 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x62, .pll[3] = 0x00, .pll[4] = 0x00,
1089 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1090 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0xA0, .pll[13] = 0x00, .pll[14] = 0x00,
1091 .pll[15] = 0x0C, .pll[16] = 0x09, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1094 static const struct intel_c10pll_state mtl_c10_hdmi_36000 = {
1098 .pll[0] = 0xC4, .pll[1] = 0x00, .pll[2] = 0x76, .pll[3] = 0x00, .pll[4] = 0x00,
1099 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1100 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x00, .pll[13] = 0x00, .pll[14] = 0x00,
1101 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1104 static const struct intel_c10pll_state mtl_c10_hdmi_40000 = {
1108 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x86, .pll[3] = 0x00, .pll[4] = 0x00,
1109 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1110 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0x55, .pll[13] = 0x55, .pll[14] = 0x55,
1111 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1114 static const struct intel_c10pll_state mtl_c10_hdmi_49500 = {
1118 .pll[0] = 0x74, .pll[1] = 0x00, .pll[2] = 0xAE, .pll[3] = 0x00, .pll[4] = 0x00,
1119 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1120 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x20, .pll[13] = 0x00, .pll[14] = 0x00,
1121 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1124 static const struct intel_c10pll_state mtl_c10_hdmi_50000 = {
1128 .pll[0] = 0x74, .pll[1] = 0x00, .pll[2] = 0xB0, .pll[3] = 0x00, .pll[4] = 0x00,
1129 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1130 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0x2A, .pll[13] = 0xA9, .pll[14] = 0xAA,
1131 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1134 static const struct intel_c10pll_state mtl_c10_hdmi_57284 = {
1138 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xCE, .pll[3] = 0x00, .pll[4] = 0x00,
1139 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1140 .pll[10] = 0xFF, .pll[11] = 0x77, .pll[12] = 0x57, .pll[13] = 0x77, .pll[14] = 0x77,
1141 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1144 static const struct intel_c10pll_state mtl_c10_hdmi_58000 = {
1148 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xD0, .pll[3] = 0x00, .pll[4] = 0x00,
1149 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1150 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0xD5, .pll[13] = 0x55, .pll[14] = 0x55,
1151 .pll[15] = 0x0C, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1154 static const struct intel_c10pll_state mtl_c10_hdmi_65000 = {
1158 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x66, .pll[3] = 0x00, .pll[4] = 0x00,
1159 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1160 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0xB5, .pll[13] = 0x55, .pll[14] = 0x55,
1161 .pll[15] = 0x0B, .pll[16] = 0x09, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1164 static const struct intel_c10pll_state mtl_c10_hdmi_71000 = {
1168 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x72, .pll[3] = 0x00, .pll[4] = 0x00,
1169 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1170 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0xF5, .pll[13] = 0x55, .pll[14] = 0x55,
1171 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1174 static const struct intel_c10pll_state mtl_c10_hdmi_74176 = {
1178 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1179 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1180 .pll[10] = 0xFF, .pll[11] = 0x44, .pll[12] = 0x44, .pll[13] = 0x44, .pll[14] = 0x44,
1181 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1184 static const struct intel_c10pll_state mtl_c10_hdmi_75000 = {
1188 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7C, .pll[3] = 0x00, .pll[4] = 0x00,
1189 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1190 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x20, .pll[13] = 0x00, .pll[14] = 0x00,
1191 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1194 static const struct intel_c10pll_state mtl_c10_hdmi_78750 = {
1198 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x84, .pll[3] = 0x00, .pll[4] = 0x00,
1199 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1200 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x08, .pll[13] = 0x00, .pll[14] = 0x00,
1201 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1204 static const struct intel_c10pll_state mtl_c10_hdmi_85500 = {
1208 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x92, .pll[3] = 0x00, .pll[4] = 0x00,
1209 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1210 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x10, .pll[13] = 0x00, .pll[14] = 0x00,
1211 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1214 static const struct intel_c10pll_state mtl_c10_hdmi_88750 = {
1218 .pll[0] = 0x74, .pll[1] = 0x00, .pll[2] = 0x98, .pll[3] = 0x00, .pll[4] = 0x00,
1219 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1220 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0x72, .pll[13] = 0xA9, .pll[14] = 0xAA,
1221 .pll[15] = 0x0B, .pll[16] = 0x09, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1224 static const struct intel_c10pll_state mtl_c10_hdmi_106500 = {
1228 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xBC, .pll[3] = 0x00, .pll[4] = 0x00,
1229 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1230 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0xF0, .pll[13] = 0x00, .pll[14] = 0x00,
1231 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1234 static const struct intel_c10pll_state mtl_c10_hdmi_108000 = {
1238 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xC0, .pll[3] = 0x00, .pll[4] = 0x00,
1239 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1240 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x80, .pll[13] = 0x00, .pll[14] = 0x00,
1241 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1244 static const struct intel_c10pll_state mtl_c10_hdmi_115500 = {
1248 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xD0, .pll[3] = 0x00, .pll[4] = 0x00,
1249 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1250 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x50, .pll[13] = 0x00, .pll[14] = 0x00,
1251 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1254 static const struct intel_c10pll_state mtl_c10_hdmi_119000 = {
1258 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xD6, .pll[3] = 0x00, .pll[4] = 0x00,
1259 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1260 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0xF5, .pll[13] = 0x55, .pll[14] = 0x55,
1261 .pll[15] = 0x0B, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1264 static const struct intel_c10pll_state mtl_c10_hdmi_135000 = {
1268 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x6C, .pll[3] = 0x00, .pll[4] = 0x00,
1269 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1270 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x50, .pll[13] = 0x00, .pll[14] = 0x00,
1271 .pll[15] = 0x0A, .pll[16] = 0x09, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1274 static const struct intel_c10pll_state mtl_c10_hdmi_138500 = {
1278 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x70, .pll[3] = 0x00, .pll[4] = 0x00,
1279 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1280 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0x22, .pll[13] = 0xA9, .pll[14] = 0xAA,
1281 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1284 static const struct intel_c10pll_state mtl_c10_hdmi_147160 = {
1288 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x78, .pll[3] = 0x00, .pll[4] = 0x00,
1289 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1290 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0xA5, .pll[13] = 0x55, .pll[14] = 0x55,
1291 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1294 static const struct intel_c10pll_state mtl_c10_hdmi_148352 = {
1298 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1299 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1300 .pll[10] = 0xFF, .pll[11] = 0x44, .pll[12] = 0x44, .pll[13] = 0x44, .pll[14] = 0x44,
1301 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1304 static const struct intel_c10pll_state mtl_c10_hdmi_154000 = {
1308 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x80, .pll[3] = 0x00, .pll[4] = 0x00,
1309 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1310 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0x35, .pll[13] = 0x55, .pll[14] = 0x55,
1311 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1314 static const struct intel_c10pll_state mtl_c10_hdmi_162000 = {
1318 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x88, .pll[3] = 0x00, .pll[4] = 0x00,
1319 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1320 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x60, .pll[13] = 0x00, .pll[14] = 0x00,
1321 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1324 static const struct intel_c10pll_state mtl_c10_hdmi_167000 = {
1328 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x8C, .pll[3] = 0x00, .pll[4] = 0x00,
1329 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1330 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0xFA, .pll[13] = 0xA9, .pll[14] = 0xAA,
1331 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1334 static const struct intel_c10pll_state mtl_c10_hdmi_197802 = {
1338 .pll[0] = 0x74, .pll[1] = 0x00, .pll[2] = 0xAE, .pll[3] = 0x00, .pll[4] = 0x00,
1339 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1340 .pll[10] = 0xFF, .pll[11] = 0x99, .pll[12] = 0x05, .pll[13] = 0x98, .pll[14] = 0x99,
1341 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1344 static const struct intel_c10pll_state mtl_c10_hdmi_198000 = {
1348 .pll[0] = 0x74, .pll[1] = 0x00, .pll[2] = 0xAE, .pll[3] = 0x00, .pll[4] = 0x00,
1349 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1350 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x20, .pll[13] = 0x00, .pll[14] = 0x00,
1351 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1354 static const struct intel_c10pll_state mtl_c10_hdmi_209800 = {
1358 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xBA, .pll[3] = 0x00, .pll[4] = 0x00,
1359 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1360 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0x45, .pll[13] = 0x55, .pll[14] = 0x55,
1361 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1364 static const struct intel_c10pll_state mtl_c10_hdmi_241500 = {
1368 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xDA, .pll[3] = 0x00, .pll[4] = 0x00,
1369 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1370 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0xC8, .pll[13] = 0x00, .pll[14] = 0x00,
1371 .pll[15] = 0x0A, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1374 static const struct intel_c10pll_state mtl_c10_hdmi_262750 = {
1378 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x68, .pll[3] = 0x00, .pll[4] = 0x00,
1379 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1380 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0x6C, .pll[13] = 0xA9, .pll[14] = 0xAA,
1381 .pll[15] = 0x09, .pll[16] = 0x09, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1384 static const struct intel_c10pll_state mtl_c10_hdmi_268500 = {
1388 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x6A, .pll[3] = 0x00, .pll[4] = 0x00,
1389 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1390 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0xEC, .pll[13] = 0x00, .pll[14] = 0x00,
1391 .pll[15] = 0x09, .pll[16] = 0x09, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1394 static const struct intel_c10pll_state mtl_c10_hdmi_296703 = {
1398 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1399 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1400 .pll[10] = 0xFF, .pll[11] = 0x33, .pll[12] = 0x44, .pll[13] = 0x33, .pll[14] = 0x33,
1401 .pll[15] = 0x09, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1404 static const struct intel_c10pll_state mtl_c10_hdmi_297000 = {
1408 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1409 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1410 .pll[10] = 0xFF, .pll[11] = 0x00, .pll[12] = 0x58, .pll[13] = 0x00, .pll[14] = 0x00,
1411 .pll[15] = 0x09, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1414 static const struct intel_c10pll_state mtl_c10_hdmi_319750 = {
1418 .pll[0] = 0xB4, .pll[1] = 0x00, .pll[2] = 0x86, .pll[3] = 0x00, .pll[4] = 0x00,
1419 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1420 .pll[10] = 0xFF, .pll[11] = 0xAA, .pll[12] = 0x44, .pll[13] = 0xA9, .pll[14] = 0xAA,
1421 .pll[15] = 0x09, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1424 static const struct intel_c10pll_state mtl_c10_hdmi_497750 = {
1428 .pll[0] = 0x34, .pll[1] = 0x00, .pll[2] = 0xE2, .pll[3] = 0x00, .pll[4] = 0x00,
1429 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1430 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0x9F, .pll[13] = 0x55, .pll[14] = 0x55,
1431 .pll[15] = 0x09, .pll[16] = 0x08, .pll[17] = 0xCF, .pll[18] = 0x84, .pll[19] = 0x23,
1434 static const struct intel_c10pll_state mtl_c10_hdmi_592000 = {
1438 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1439 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1440 .pll[10] = 0xFF, .pll[11] = 0x55, .pll[12] = 0x15, .pll[13] = 0x55, .pll[14] = 0x55,
1441 .pll[15] = 0x08, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1444 static const struct intel_c10pll_state mtl_c10_hdmi_593407 = {
1448 .pll[0] = 0xF4, .pll[1] = 0x00, .pll[2] = 0x7A, .pll[3] = 0x00, .pll[4] = 0x00,
1449 .pll[5] = 0x00, .pll[6] = 0x00, .pll[7] = 0x00, .pll[8] = 0x20, .pll[9] = 0xFF,
1450 .pll[10] = 0xFF, .pll[11] = 0x3B, .pll[12] = 0x44, .pll[13] = 0xBA, .pll[14] = 0xBB,
1451 .pll[15] = 0x08, .pll[16] = 0x08, .pll[17] = 0x8F, .pll[18] = 0x84, .pll[19] = 0x23,
1454 static const struct intel_c10pll_state * const mtl_c10_hdmi_tables[] = {
1455 &mtl_c10_hdmi_25_2, /* Consolidated Table */
1456 &mtl_c10_hdmi_27_0, /* Consolidated Table */
1457 &mtl_c10_hdmi_27027,
1458 &mtl_c10_hdmi_28320,
1459 &mtl_c10_hdmi_30240,
1460 &mtl_c10_hdmi_31500,
1461 &mtl_c10_hdmi_36000,
1462 &mtl_c10_hdmi_40000,
1463 &mtl_c10_hdmi_49500,
1464 &mtl_c10_hdmi_50000,
1465 &mtl_c10_hdmi_57284,
1466 &mtl_c10_hdmi_58000,
1467 &mtl_c10_hdmi_65000,
1468 &mtl_c10_hdmi_71000,
1469 &mtl_c10_hdmi_74176,
1470 &mtl_c10_hdmi_74_25, /* Consolidated Table */
1471 &mtl_c10_hdmi_75000,
1472 &mtl_c10_hdmi_78750,
1473 &mtl_c10_hdmi_85500,
1474 &mtl_c10_hdmi_88750,
1475 &mtl_c10_hdmi_106500,
1476 &mtl_c10_hdmi_108000,
1477 &mtl_c10_hdmi_115500,
1478 &mtl_c10_hdmi_119000,
1479 &mtl_c10_hdmi_135000,
1480 &mtl_c10_hdmi_138500,
1481 &mtl_c10_hdmi_147160,
1482 &mtl_c10_hdmi_148352,
1483 &mtl_c10_hdmi_148_5, /* Consolidated Table */
1484 &mtl_c10_hdmi_154000,
1485 &mtl_c10_hdmi_162000,
1486 &mtl_c10_hdmi_167000,
1487 &mtl_c10_hdmi_197802,
1488 &mtl_c10_hdmi_198000,
1489 &mtl_c10_hdmi_209800,
1490 &mtl_c10_hdmi_241500,
1491 &mtl_c10_hdmi_262750,
1492 &mtl_c10_hdmi_268500,
1493 &mtl_c10_hdmi_296703,
1494 &mtl_c10_hdmi_297000,
1495 &mtl_c10_hdmi_319750,
1496 &mtl_c10_hdmi_497750,
1497 &mtl_c10_hdmi_592000,
1498 &mtl_c10_hdmi_593407,
1499 &mtl_c10_hdmi_594, /* Consolidated Table */
1503 static const struct intel_c20pll_state mtl_c20_hdmi_25_175 = {
1504 .link_bit_rate = 25175,
1506 .tx = { 0xbe88, /* tx cfg0 */
1507 0x9800, /* tx cfg1 */
1508 0x0000, /* tx cfg2 */
1510 .cmn = { 0x0500, /* cmn cfg0*/
1511 0x0005, /* cmn cfg1 */
1512 0x0000, /* cmn cfg2 */
1513 0x0000, /* cmn cfg3 */
1515 .mpllb = { 0xa0d2, /* mpllb cfg0 */
1516 0x7d80, /* mpllb cfg1 */
1517 0x0906, /* mpllb cfg2 */
1518 0xbe40, /* mpllb cfg3 */
1519 0x0000, /* mpllb cfg4 */
1520 0x0000, /* mpllb cfg5 */
1521 0x0200, /* mpllb cfg6 */
1522 0x0001, /* mpllb cfg7 */
1523 0x0000, /* mpllb cfg8 */
1524 0x0000, /* mpllb cfg9 */
1525 0x0001, /* mpllb cfg10 */
1529 static const struct intel_c20pll_state mtl_c20_hdmi_27_0 = {
1530 .link_bit_rate = 27000,
1532 .tx = { 0xbe88, /* tx cfg0 */
1533 0x9800, /* tx cfg1 */
1534 0x0000, /* tx cfg2 */
1536 .cmn = { 0x0500, /* cmn cfg0*/
1537 0x0005, /* cmn cfg1 */
1538 0x0000, /* cmn cfg2 */
1539 0x0000, /* cmn cfg3 */
1541 .mpllb = { 0xa0e0, /* mpllb cfg0 */
1542 0x7d80, /* mpllb cfg1 */
1543 0x0906, /* mpllb cfg2 */
1544 0xbe40, /* mpllb cfg3 */
1545 0x0000, /* mpllb cfg4 */
1546 0x0000, /* mpllb cfg5 */
1547 0x2200, /* mpllb cfg6 */
1548 0x0001, /* mpllb cfg7 */
1549 0x8000, /* mpllb cfg8 */
1550 0x0000, /* mpllb cfg9 */
1551 0x0001, /* mpllb cfg10 */
1555 static const struct intel_c20pll_state mtl_c20_hdmi_74_25 = {
1556 .link_bit_rate = 74250,
1558 .tx = { 0xbe88, /* tx cfg0 */
1559 0x9800, /* tx cfg1 */
1560 0x0000, /* tx cfg2 */
1562 .cmn = { 0x0500, /* cmn cfg0*/
1563 0x0005, /* cmn cfg1 */
1564 0x0000, /* cmn cfg2 */
1565 0x0000, /* cmn cfg3 */
1567 .mpllb = { 0x609a, /* mpllb cfg0 */
1568 0x7d40, /* mpllb cfg1 */
1569 0xca06, /* mpllb cfg2 */
1570 0xbe40, /* mpllb cfg3 */
1571 0x0000, /* mpllb cfg4 */
1572 0x0000, /* mpllb cfg5 */
1573 0x2200, /* mpllb cfg6 */
1574 0x0001, /* mpllb cfg7 */
1575 0x5800, /* mpllb cfg8 */
1576 0x0000, /* mpllb cfg9 */
1577 0x0001, /* mpllb cfg10 */
1581 static const struct intel_c20pll_state mtl_c20_hdmi_148_5 = {
1582 .link_bit_rate = 148500,
1584 .tx = { 0xbe88, /* tx cfg0 */
1585 0x9800, /* tx cfg1 */
1586 0x0000, /* tx cfg2 */
1588 .cmn = { 0x0500, /* cmn cfg0*/
1589 0x0005, /* cmn cfg1 */
1590 0x0000, /* cmn cfg2 */
1591 0x0000, /* cmn cfg3 */
1593 .mpllb = { 0x409a, /* mpllb cfg0 */
1594 0x7d20, /* mpllb cfg1 */
1595 0xca06, /* mpllb cfg2 */
1596 0xbe40, /* mpllb cfg3 */
1597 0x0000, /* mpllb cfg4 */
1598 0x0000, /* mpllb cfg5 */
1599 0x2200, /* mpllb cfg6 */
1600 0x0001, /* mpllb cfg7 */
1601 0x5800, /* mpllb cfg8 */
1602 0x0000, /* mpllb cfg9 */
1603 0x0001, /* mpllb cfg10 */
1607 static const struct intel_c20pll_state mtl_c20_hdmi_594 = {
1608 .link_bit_rate = 594000,
1610 .tx = { 0xbe88, /* tx cfg0 */
1611 0x9800, /* tx cfg1 */
1612 0x0000, /* tx cfg2 */
1614 .cmn = { 0x0500, /* cmn cfg0*/
1615 0x0005, /* cmn cfg1 */
1616 0x0000, /* cmn cfg2 */
1617 0x0000, /* cmn cfg3 */
1619 .mpllb = { 0x009a, /* mpllb cfg0 */
1620 0x7d08, /* mpllb cfg1 */
1621 0xca06, /* mpllb cfg2 */
1622 0xbe40, /* mpllb cfg3 */
1623 0x0000, /* mpllb cfg4 */
1624 0x0000, /* mpllb cfg5 */
1625 0x2200, /* mpllb cfg6 */
1626 0x0001, /* mpllb cfg7 */
1627 0x5800, /* mpllb cfg8 */
1628 0x0000, /* mpllb cfg9 */
1629 0x0001, /* mpllb cfg10 */
1633 static const struct intel_c20pll_state mtl_c20_hdmi_300 = {
1634 .link_bit_rate = 3000000,
1636 .tx = { 0xbe98, /* tx cfg0 */
1637 0x9800, /* tx cfg1 */
1638 0x0000, /* tx cfg2 */
1640 .cmn = { 0x0500, /* cmn cfg0*/
1641 0x0005, /* cmn cfg1 */
1642 0x0000, /* cmn cfg2 */
1643 0x0000, /* cmn cfg3 */
1645 .mpllb = { 0x209c, /* mpllb cfg0 */
1646 0x7d10, /* mpllb cfg1 */
1647 0xca06, /* mpllb cfg2 */
1648 0xbe40, /* mpllb cfg3 */
1649 0x0000, /* mpllb cfg4 */
1650 0x0000, /* mpllb cfg5 */
1651 0x2200, /* mpllb cfg6 */
1652 0x0001, /* mpllb cfg7 */
1653 0x2000, /* mpllb cfg8 */
1654 0x0000, /* mpllb cfg9 */
1655 0x0004, /* mpllb cfg10 */
1659 static const struct intel_c20pll_state mtl_c20_hdmi_600 = {
1660 .link_bit_rate = 6000000,
1662 .tx = { 0xbe98, /* tx cfg0 */
1663 0x9800, /* tx cfg1 */
1664 0x0000, /* tx cfg2 */
1666 .cmn = { 0x0500, /* cmn cfg0*/
1667 0x0005, /* cmn cfg1 */
1668 0x0000, /* cmn cfg2 */
1669 0x0000, /* cmn cfg3 */
1671 .mpllb = { 0x009c, /* mpllb cfg0 */
1672 0x7d08, /* mpllb cfg1 */
1673 0xca06, /* mpllb cfg2 */
1674 0xbe40, /* mpllb cfg3 */
1675 0x0000, /* mpllb cfg4 */
1676 0x0000, /* mpllb cfg5 */
1677 0x2200, /* mpllb cfg6 */
1678 0x0001, /* mpllb cfg7 */
1679 0x2000, /* mpllb cfg8 */
1680 0x0000, /* mpllb cfg9 */
1681 0x0004, /* mpllb cfg10 */
1685 static const struct intel_c20pll_state mtl_c20_hdmi_800 = {
1686 .link_bit_rate = 8000000,
1688 .tx = { 0xbe98, /* tx cfg0 */
1689 0x9800, /* tx cfg1 */
1690 0x0000, /* tx cfg2 */
1692 .cmn = { 0x0500, /* cmn cfg0*/
1693 0x0005, /* cmn cfg1 */
1694 0x0000, /* cmn cfg2 */
1695 0x0000, /* cmn cfg3 */
1697 .mpllb = { 0x00d0, /* mpllb cfg0 */
1698 0x7d08, /* mpllb cfg1 */
1699 0x4a06, /* mpllb cfg2 */
1700 0xbe40, /* mpllb cfg3 */
1701 0x0000, /* mpllb cfg4 */
1702 0x0000, /* mpllb cfg5 */
1703 0x2200, /* mpllb cfg6 */
1704 0x0003, /* mpllb cfg7 */
1705 0x2aaa, /* mpllb cfg8 */
1706 0x0002, /* mpllb cfg9 */
1707 0x0004, /* mpllb cfg10 */
1711 static const struct intel_c20pll_state mtl_c20_hdmi_1000 = {
1712 .link_bit_rate = 10000000,
1714 .tx = { 0xbe98, /* tx cfg0 */
1715 0x9800, /* tx cfg1 */
1716 0x0000, /* tx cfg2 */
1718 .cmn = { 0x0500, /* cmn cfg0*/
1719 0x0005, /* cmn cfg1 */
1720 0x0000, /* cmn cfg2 */
1721 0x0000, /* cmn cfg3 */
1723 .mpllb = { 0x1104, /* mpllb cfg0 */
1724 0x7d08, /* mpllb cfg1 */
1725 0x0a06, /* mpllb cfg2 */
1726 0xbe40, /* mpllb cfg3 */
1727 0x0000, /* mpllb cfg4 */
1728 0x0000, /* mpllb cfg5 */
1729 0x2200, /* mpllb cfg6 */
1730 0x0003, /* mpllb cfg7 */
1731 0x3555, /* mpllb cfg8 */
1732 0x0001, /* mpllb cfg9 */
1733 0x0004, /* mpllb cfg10 */
1737 static const struct intel_c20pll_state mtl_c20_hdmi_1200 = {
1738 .link_bit_rate = 12000000,
1740 .tx = { 0xbe98, /* tx cfg0 */
1741 0x9800, /* tx cfg1 */
1742 0x0000, /* tx cfg2 */
1744 .cmn = { 0x0500, /* cmn cfg0*/
1745 0x0005, /* cmn cfg1 */
1746 0x0000, /* cmn cfg2 */
1747 0x0000, /* cmn cfg3 */
1749 .mpllb = { 0x0138, /* mpllb cfg0 */
1750 0x7d08, /* mpllb cfg1 */
1751 0x5486, /* mpllb cfg2 */
1752 0xfe40, /* mpllb cfg3 */
1753 0x0000, /* mpllb cfg4 */
1754 0x0000, /* mpllb cfg5 */
1755 0x2200, /* mpllb cfg6 */
1756 0x0001, /* mpllb cfg7 */
1757 0x4000, /* mpllb cfg8 */
1758 0x0000, /* mpllb cfg9 */
1759 0x0004, /* mpllb cfg10 */
1763 static const struct intel_c20pll_state * const mtl_c20_hdmi_tables[] = {
1764 &mtl_c20_hdmi_25_175,
1766 &mtl_c20_hdmi_74_25,
1767 &mtl_c20_hdmi_148_5,
static int intel_c10_phy_check_hdmi_link_rate(int clock)
{
	const struct intel_c10pll_state * const *tables = mtl_c10_hdmi_tables;
	int i;

	for (i = 0; tables[i]; i++) {
		if (clock == tables[i]->clock)
			return MODE_OK;
	}

	return MODE_CLOCK_RANGE;
}
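
/*
 * Unlike the C20 check further below, which accepts any clock in the
 * 25175-594000 kHz range because a TMDS PLL can be computed on the fly,
 * the C10 check above only accepts clocks with a precomputed entry in
 * mtl_c10_hdmi_tables; everything else is rejected with MODE_CLOCK_RANGE.
 */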
static const struct intel_c10pll_state * const *
intel_c10pll_tables_get(struct intel_crtc_state *crtc_state,
			struct intel_encoder *encoder)
{
	if (intel_crtc_has_dp_encoder(crtc_state)) {
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			return mtl_c10_edp_tables;
		else
			return mtl_c10_dp_tables;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		return mtl_c10_hdmi_tables;
	}

	MISSING_CASE(encoder->type);
	return NULL;
}
static void intel_c10pll_update_pll(struct intel_crtc_state *crtc_state,
				    struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_cx0pll_state *pll_state = &crtc_state->cx0pll_state;
	int i;

	if (intel_crtc_has_dp_encoder(crtc_state)) {
		if (intel_panel_use_ssc(i915)) {
			struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

			pll_state->ssc_enabled =
				(intel_dp->dpcd[DP_MAX_DOWNSPREAD] & DP_MAX_DOWNSPREAD_0_5);
		}
	}

	if (pll_state->ssc_enabled)
		return;

	drm_WARN_ON(&i915->drm, ARRAY_SIZE(pll_state->c10.pll) < 9);
	for (i = 4; i < 9; i++)
		pll_state->c10.pll[i] = 0;
}
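
/*
 * What the SSC handling above means in practice (descriptive only): the
 * precomputed DP tables are generated with SSC enabled, so when the sink
 * does not advertise DP_MAX_DOWNSPREAD_0_5 (or SSC is otherwise disabled),
 * pll[4]..pll[8] (registers 0xC04..0xC08) are cleared to zero, matching
 * the note above the DP table definitions.
 */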
static int intel_c10pll_calc_state(struct intel_crtc_state *crtc_state,
				   struct intel_encoder *encoder)
{
	const struct intel_c10pll_state * const *tables;
	int i;

	tables = intel_c10pll_tables_get(crtc_state, encoder);
	if (!tables)
		return -EINVAL;

	for (i = 0; tables[i]; i++) {
		if (crtc_state->port_clock == tables[i]->clock) {
			crtc_state->cx0pll_state.c10 = *tables[i];
			intel_c10pll_update_pll(crtc_state, encoder);

			return 0;
		}
	}

	return -EINVAL;
}
1853 void intel_c10pll_readout_hw_state(struct intel_encoder *encoder,
1854 struct intel_c10pll_state *pll_state)
1856 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1857 u8 lane = INTEL_CX0_LANE0;
1858 intel_wakeref_t wakeref;
1861 wakeref = intel_cx0_phy_transaction_begin(encoder);
	/*
	 * According to C10 VDR Register programming Sequence we need
	 * to do this to read PHY internal registers from MsgBus.
	 */
1867 intel_cx0_rmw(i915, encoder->port, lane, PHY_C10_VDR_CONTROL(1),
1868 0, C10_VDR_CTRL_MSGBUS_ACCESS,
1869 MB_WRITE_COMMITTED);
1871 for (i = 0; i < ARRAY_SIZE(pll_state->pll); i++)
1872 pll_state->pll[i] = intel_cx0_read(i915, encoder->port, lane,
1873 PHY_C10_VDR_PLL(i));
1875 pll_state->cmn = intel_cx0_read(i915, encoder->port, lane, PHY_C10_VDR_CMN(0));
1876 pll_state->tx = intel_cx0_read(i915, encoder->port, lane, PHY_C10_VDR_TX(0));
1878 intel_cx0_phy_transaction_end(encoder, wakeref);
1881 static void intel_c10_pll_program(struct drm_i915_private *i915,
1882 const struct intel_crtc_state *crtc_state,
1883 struct intel_encoder *encoder)
1885 const struct intel_c10pll_state *pll_state = &crtc_state->cx0pll_state.c10;
1888 intel_cx0_rmw(i915, encoder->port, INTEL_CX0_BOTH_LANES, PHY_C10_VDR_CONTROL(1),
1889 0, C10_VDR_CTRL_MSGBUS_ACCESS,
1890 MB_WRITE_COMMITTED);
1892 /* Custom width needs to be programmed to 0 for both the phy lanes */
1893 intel_cx0_rmw(i915, encoder->port, INTEL_CX0_BOTH_LANES, PHY_C10_VDR_CUSTOM_WIDTH,
1894 C10_VDR_CUSTOM_WIDTH_MASK, C10_VDR_CUSTOM_WIDTH_8_10,
1895 MB_WRITE_COMMITTED);
1896 intel_cx0_rmw(i915, encoder->port, INTEL_CX0_BOTH_LANES, PHY_C10_VDR_CONTROL(1),
1897 0, C10_VDR_CTRL_UPDATE_CFG,
1898 MB_WRITE_COMMITTED);
	/* Program the pll values only for the master lane */
	for (i = 0; i < ARRAY_SIZE(pll_state->pll); i++)
		intel_cx0_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C10_VDR_PLL(i),
				pll_state->pll[i],
				(i % 4) ? MB_WRITE_UNCOMMITTED : MB_WRITE_COMMITTED);
1906 intel_cx0_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C10_VDR_CMN(0), pll_state->cmn, MB_WRITE_COMMITTED);
1907 intel_cx0_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C10_VDR_TX(0), pll_state->tx, MB_WRITE_COMMITTED);
1909 intel_cx0_rmw(i915, encoder->port, INTEL_CX0_LANE0, PHY_C10_VDR_CONTROL(1),
1910 0, C10_VDR_CTRL_MASTER_LANE | C10_VDR_CTRL_UPDATE_CFG,
1911 MB_WRITE_COMMITTED);
1914 void intel_c10pll_dump_hw_state(struct drm_i915_private *i915,
1915 const struct intel_c10pll_state *hw_state)
1919 unsigned int frac_quot = 0, frac_rem = 0, frac_den = 1;
1920 unsigned int multiplier, tx_clk_div;
	fracen = hw_state->pll[0] & C10_PLL0_FRACEN;
	drm_dbg_kms(&i915->drm, "c10pll_hw_state: fracen: %s, ",
		    str_yes_no(fracen));

	if (fracen) {
		frac_quot = hw_state->pll[12] << 8 | hw_state->pll[11];
		frac_rem = hw_state->pll[14] << 8 | hw_state->pll[13];
		frac_den = hw_state->pll[10] << 8 | hw_state->pll[9];
		drm_dbg_kms(&i915->drm, "quot: %u, rem: %u, den: %u,\n",
			    frac_quot, frac_rem, frac_den);
	}
1934 multiplier = (REG_FIELD_GET8(C10_PLL3_MULTIPLIERH_MASK, hw_state->pll[3]) << 8 |
1935 hw_state->pll[2]) / 2 + 16;
1936 tx_clk_div = REG_FIELD_GET8(C10_PLL15_TXCLKDIV_MASK, hw_state->pll[15]);
1937 drm_dbg_kms(&i915->drm,
1938 "multiplier: %u, tx_clk_div: %u.\n", multiplier, tx_clk_div);
1940 drm_dbg_kms(&i915->drm, "c10pll_rawhw_state:");
1941 drm_dbg_kms(&i915->drm, "tx: 0x%x, cmn: 0x%x\n", hw_state->tx, hw_state->cmn);
1943 BUILD_BUG_ON(ARRAY_SIZE(hw_state->pll) % 4);
1944 for (i = 0; i < ARRAY_SIZE(hw_state->pll); i = i + 4)
1945 drm_dbg_kms(&i915->drm, "pll[%d] = 0x%x, pll[%d] = 0x%x, pll[%d] = 0x%x, pll[%d] = 0x%x\n",
1946 i, hw_state->pll[i], i + 1, hw_state->pll[i + 1],
1947 i + 2, hw_state->pll[i + 2], i + 3, hw_state->pll[i + 3]);
1950 static int intel_c20_compute_hdmi_tmds_pll(u64 pixel_clock, struct intel_c20pll_state *pll_state)
1953 u64 mpll_tx_clk_div;
1957 u64 mpll_multiplier;
1958 u64 mpll_fracn_quot;
1960 u8 mpllb_ana_freq_vco;
1961 u8 mpll_div_multiplier;
1963 if (pixel_clock < 25175 || pixel_clock > 600000)
1966 datarate = ((u64)pixel_clock * 1000) * 10;
1967 mpll_tx_clk_div = ilog2(div64_u64((u64)CLOCK_9999MHZ, (u64)datarate));
1968 vco_freq_shift = ilog2(div64_u64((u64)CLOCK_4999MHZ * (u64)256, (u64)datarate));
1969 vco_freq = (datarate << vco_freq_shift) >> 8;
1970 multiplier = div64_u64((vco_freq << 28), (REFCLK_38_4_MHZ >> 4));
1971 mpll_multiplier = 2 * (multiplier >> 32);
1973 mpll_fracn_quot = (multiplier >> 16) & 0xFFFF;
1974 mpll_fracn_rem = multiplier & 0xFFFF;
1976 mpll_div_multiplier = min_t(u8, div64_u64((vco_freq * 16 + (datarate >> 1)),
1979 if (vco_freq <= DATARATE_3000000000)
1980 mpllb_ana_freq_vco = MPLLB_ANA_FREQ_VCO_3;
1981 else if (vco_freq <= DATARATE_3500000000)
1982 mpllb_ana_freq_vco = MPLLB_ANA_FREQ_VCO_2;
1983 else if (vco_freq <= DATARATE_4000000000)
1984 mpllb_ana_freq_vco = MPLLB_ANA_FREQ_VCO_1;
1986 mpllb_ana_freq_vco = MPLLB_ANA_FREQ_VCO_0;
1988 pll_state->link_bit_rate = pixel_clock;
1989 pll_state->clock = pixel_clock;
1990 pll_state->tx[0] = 0xbe88;
1991 pll_state->tx[1] = 0x9800;
1992 pll_state->tx[2] = 0x0000;
1993 pll_state->cmn[0] = 0x0500;
1994 pll_state->cmn[1] = 0x0005;
1995 pll_state->cmn[2] = 0x0000;
1996 pll_state->cmn[3] = 0x0000;
1997 pll_state->mpllb[0] = (MPLL_TX_CLK_DIV(mpll_tx_clk_div) |
1998 MPLL_MULTIPLIER(mpll_multiplier));
1999 pll_state->mpllb[1] = (CAL_DAC_CODE(CAL_DAC_CODE_31) |
2001 MPLL_DIV_MULTIPLIER(mpll_div_multiplier));
2002 pll_state->mpllb[2] = (MPLLB_ANA_FREQ_VCO(mpllb_ana_freq_vco) |
2003 CP_PROP(CP_PROP_20) |
2005 pll_state->mpllb[3] = (V2I(V2I_2) |
2006 CP_PROP_GS(CP_PROP_GS_30) |
2007 CP_INT_GS(CP_INT_GS_28));
2008 pll_state->mpllb[4] = 0x0000;
2009 pll_state->mpllb[5] = 0x0000;
2010 pll_state->mpllb[6] = (C20_MPLLB_FRACEN | SSC_UP_SPREAD);
2011 pll_state->mpllb[7] = MPLL_FRACN_DEN;
2012 pll_state->mpllb[8] = mpll_fracn_quot;
2013 pll_state->mpllb[9] = mpll_fracn_rem;
	pll_state->mpllb[10] = HDMI_DIV(HDMI_DIV_1);

	return 0;
}
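
/*
 * Worked example of the TMDS PLL math above, assuming CLOCK_9999MHZ and
 * CLOCK_4999MHZ are the ~9.999 GHz and ~4.999 GHz limits their names
 * suggest (the values live in the regs header, not shown here). For a
 * 148.5 MHz pixel clock:
 *
 *	datarate        = 148500 * 1000 * 10                = 1.485 Gbps
 *	mpll_tx_clk_div = ilog2(9.999G / 1.485G)            = ilog2(6) = 2
 *	vco_freq_shift  = ilog2(4.999G * 256 / 1.485G)      = 9
 *	vco_freq        = (datarate << 9) >> 8              = 2.97 GHz
 *	multiplier      = (vco_freq << 28) / (38.4MHz >> 4) = 1237.5 * 2^28
 *	mpll_multiplier = 2 * (multiplier >> 32)            = 154
 *	mpll_fracn_quot = (multiplier >> 16) & 0xffff       = 0x5800
 */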
static int intel_c20_phy_check_hdmi_link_rate(int clock)
{
	const struct intel_c20pll_state * const *tables = mtl_c20_hdmi_tables;
	int i;

	for (i = 0; tables[i]; i++) {
		if (clock == tables[i]->link_bit_rate)
			return MODE_OK;
	}

	if (clock >= 25175 && clock <= 594000)
		return MODE_OK;

	return MODE_CLOCK_RANGE;
}
int intel_cx0_phy_check_hdmi_link_rate(struct intel_hdmi *hdmi, int clock)
{
	struct intel_digital_port *dig_port = hdmi_to_dig_port(hdmi);
	struct drm_i915_private *i915 = intel_hdmi_to_i915(hdmi);
	enum phy phy = intel_port_to_phy(i915, dig_port->base.port);

	if (intel_is_c10phy(i915, phy))
		return intel_c10_phy_check_hdmi_link_rate(clock);
	return intel_c20_phy_check_hdmi_link_rate(clock);
}
static const struct intel_c20pll_state * const *
intel_c20_pll_tables_get(struct intel_crtc_state *crtc_state,
			 struct intel_encoder *encoder)
{
	if (intel_crtc_has_dp_encoder(crtc_state))
		return mtl_c20_dp_tables;
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return mtl_c20_hdmi_tables;

	MISSING_CASE(encoder->type);
	return NULL;
}
static int intel_c20pll_calc_state(struct intel_crtc_state *crtc_state,
				   struct intel_encoder *encoder)
{
	const struct intel_c20pll_state * const *tables;
	int i;

	/* try computed C20 HDMI tables before using consolidated tables */
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		if (intel_c20_compute_hdmi_tmds_pll(crtc_state->port_clock,
						    &crtc_state->cx0pll_state.c20) == 0)
			return 0;
	}

	tables = intel_c20_pll_tables_get(crtc_state, encoder);
	if (!tables)
		return -EINVAL;

	for (i = 0; tables[i]; i++) {
		if (crtc_state->port_clock == tables[i]->link_bit_rate) {
			crtc_state->cx0pll_state.c20 = *tables[i];
			return 0;
		}
	}

	return -EINVAL;
}
int intel_cx0pll_calc_state(struct intel_crtc_state *crtc_state,
			    struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	if (intel_is_c10phy(i915, phy))
		return intel_c10pll_calc_state(crtc_state, encoder);
	return intel_c20pll_calc_state(crtc_state, encoder);
}
static bool intel_c20_use_mplla(u32 clock)
{
	/* 10G and 20G rates use MPLLA */
	if (clock == 312500 || clock == 625000)
		return true;

	return false;
}
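
/*
 * Background for the rate check above (a hedged note, not taken from this
 * file): "clock" here is the DP symbol clock in kHz. With 128b/132b channel
 * coding a link symbol is 32 bits, so UHBR10 (10 Gbps) runs at
 * 10e9 / 32 = 312500 kHz and UHBR20 (20 Gbps) at 625000 kHz, which is why
 * exactly those two symbol clocks select MPLLA.
 */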
2106 void intel_c20pll_readout_hw_state(struct intel_encoder *encoder,
2107 struct intel_c20pll_state *pll_state)
2109 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2111 intel_wakeref_t wakeref;
2114 wakeref = intel_cx0_phy_transaction_begin(encoder);
2116 /* 1. Read current context selection */
2117 cntx = intel_cx0_read(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_VDR_CUSTOM_SERDES_RATE) & PHY_C20_CONTEXT_TOGGLE;
2119 /* Read Tx configuration */
2120 for (i = 0; i < ARRAY_SIZE(pll_state->tx); i++) {
2122 pll_state->tx[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2123 PHY_C20_B_TX_CNTX_CFG(i));
2125 pll_state->tx[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2126 PHY_C20_A_TX_CNTX_CFG(i));
2129 /* Read common configuration */
2130 for (i = 0; i < ARRAY_SIZE(pll_state->cmn); i++) {
2132 pll_state->cmn[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2133 PHY_C20_B_CMN_CNTX_CFG(i));
2135 pll_state->cmn[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2136 PHY_C20_A_CMN_CNTX_CFG(i));
2139 if (pll_state->tx[0] & C20_PHY_USE_MPLLB) {
2140 /* MPLLB configuration */
2141 for (i = 0; i < ARRAY_SIZE(pll_state->mpllb); i++) {
2143 pll_state->mpllb[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2144 PHY_C20_B_MPLLB_CNTX_CFG(i));
2146 pll_state->mpllb[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2147 PHY_C20_A_MPLLB_CNTX_CFG(i));
2150 /* MPLLA configuration */
2151 for (i = 0; i < ARRAY_SIZE(pll_state->mplla); i++) {
2153 pll_state->mplla[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2154 PHY_C20_B_MPLLA_CNTX_CFG(i));
2156 pll_state->mplla[i] = intel_c20_sram_read(i915, encoder->port, INTEL_CX0_LANE0,
2157 PHY_C20_A_MPLLA_CNTX_CFG(i));
2161 intel_cx0_phy_transaction_end(encoder, wakeref);
void intel_c20pll_dump_hw_state(struct drm_i915_private *i915,
				const struct intel_c20pll_state *hw_state)
{
	int i;

	drm_dbg_kms(&i915->drm, "c20pll_hw_state:\n");
	drm_dbg_kms(&i915->drm, "tx[0] = 0x%.4x, tx[1] = 0x%.4x, tx[2] = 0x%.4x\n",
		    hw_state->tx[0], hw_state->tx[1], hw_state->tx[2]);
	drm_dbg_kms(&i915->drm, "cmn[0] = 0x%.4x, cmn[1] = 0x%.4x, cmn[2] = 0x%.4x, cmn[3] = 0x%.4x\n",
		    hw_state->cmn[0], hw_state->cmn[1], hw_state->cmn[2], hw_state->cmn[3]);

	if (intel_c20_use_mplla(hw_state->clock)) {
		for (i = 0; i < ARRAY_SIZE(hw_state->mplla); i++)
			drm_dbg_kms(&i915->drm, "mplla[%d] = 0x%.4x\n", i, hw_state->mplla[i]);
	} else {
		for (i = 0; i < ARRAY_SIZE(hw_state->mpllb); i++)
			drm_dbg_kms(&i915->drm, "mpllb[%d] = 0x%.4x\n", i, hw_state->mpllb[i]);
	}
}
2184 static u8 intel_c20_get_dp_rate(u32 clock)
2187 case 162000: /* 1.62 Gbps DP1.4 */
2189 case 270000: /* 2.7 Gbps DP1.4 */
2191 case 540000: /* 5.4 Gbps DP 1.4 */
2193 case 810000: /* 8.1 Gbps DP1.4 */
2195 case 216000: /* 2.16 Gbps eDP */
2197 case 243000: /* 2.43 Gbps eDP */
2199 case 324000: /* 3.24 Gbps eDP */
2201 case 432000: /* 4.32 Gbps eDP */
2203 case 312500: /* 10 Gbps DP2.0 */
2205 case 421875: /* 13.5 Gbps DP2.0 */
2207 case 625000: /* 20 Gbps DP2.0*/
2209 case 648000: /* 6.48 Gbps eDP*/
2211 case 675000: /* 6.75 Gbps eDP*/
2214 MISSING_CASE(clock);
2219 static u8 intel_c20_get_hdmi_rate(u32 clock)
2221 if (clock >= 25175 && clock <= 600000)
2225 case 166670: /* 3 Gbps */
2226 case 333330: /* 6 Gbps */
2227 case 666670: /* 12 Gbps */
2229 case 444440: /* 8 Gbps */
2231 case 555560: /* 10 Gbps */
2234 MISSING_CASE(clock);
static bool is_dp2(u32 clock)
{
	/* DP2.0 clock rates */
	if (clock == 312500 || clock == 421875 || clock == 625000)
		return true;

	return false;
}

static bool is_hdmi_frl(u32 clock)
{
	switch (clock) {
	case 166670: /* 3 Gbps */
	case 333330: /* 6 Gbps */
	case 444440: /* 8 Gbps */
	case 555560: /* 10 Gbps */
	case 666670: /* 12 Gbps */
		return true;
	default:
		return false;
	}
}
2262 static bool intel_c20_protocol_switch_valid(struct intel_encoder *encoder)
2264 struct intel_digital_port *intel_dig_port = enc_to_dig_port(encoder);
2266 /* banks should not be cleared for DPALT/USB4/TBT modes */
2267 /* TODO: optimize re-calibration in legacy mode */
2268 return intel_tc_port_in_legacy_mode(intel_dig_port);
2271 static int intel_get_c20_custom_width(u32 clock, bool dp)
2273 if (dp && is_dp2(clock))
2275 else if (is_hdmi_frl(clock))
2281 static void intel_c20_pll_program(struct drm_i915_private *i915,
2282 const struct intel_crtc_state *crtc_state,
2283 struct intel_encoder *encoder)
2285 const struct intel_c20pll_state *pll_state = &crtc_state->cx0pll_state.c20;
2287 int lane = crtc_state->lane_count > 2 ? INTEL_CX0_BOTH_LANES : INTEL_CX0_LANE0;
2291 if (intel_crtc_has_dp_encoder(crtc_state))
2294 /* 1. Read current context selection */
2295 cntx = intel_cx0_read(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_VDR_CUSTOM_SERDES_RATE) & BIT(0);
2298 * 2. If there is a protocol switch from HDMI to DP or vice versa, clear
2299 * the lane #0 MPLLB CAL_DONE_BANK registers so the PLL is re-calibrated.
2300 * Protocol switch is only applicable for MPLLA.
2302 if (intel_c20_protocol_switch_valid(encoder)) {
2303 for (i = 0; i < 4; i++)
2304 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0, RAWLANEAONX_DIG_TX_MPLLB_CAL_DONE_BANK(i), 0);
2305 usleep_range(4000, 4100);
2308 /* 3. Write SRAM configuration context. If A in use, write configuration to B context */
2309 /* 3.1 Tx configuration */
2310 for (i = 0; i < ARRAY_SIZE(pll_state->tx); i++) {
2312 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_A_TX_CNTX_CFG(i), pll_state->tx[i]);
2314 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_B_TX_CNTX_CFG(i), pll_state->tx[i]);
2317 /* 3.2 common configuration */
2318 for (i = 0; i < ARRAY_SIZE(pll_state->cmn); i++) {
2320 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_A_CMN_CNTX_CFG(i), pll_state->cmn[i]);
2322 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0, PHY_C20_B_CMN_CNTX_CFG(i), pll_state->cmn[i]);
2325 /* 3.3 mpllb or mplla configuration */
2326 if (intel_c20_use_mplla(pll_state->clock)) {
2327 for (i = 0; i < ARRAY_SIZE(pll_state->mplla); i++) {
2329 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0,
2330 PHY_C20_A_MPLLA_CNTX_CFG(i),
2331 pll_state->mplla[i]);
2333 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0,
2334 PHY_C20_B_MPLLA_CNTX_CFG(i),
2335 pll_state->mplla[i]);
2338 for (i = 0; i < ARRAY_SIZE(pll_state->mpllb); i++) {
2340 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0,
2341 PHY_C20_A_MPLLB_CNTX_CFG(i),
2342 pll_state->mpllb[i]);
2344 intel_c20_sram_write(i915, encoder->port, INTEL_CX0_LANE0,
2345 PHY_C20_B_MPLLB_CNTX_CFG(i),
2346 pll_state->mpllb[i]);
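/*
 * Note: the SRAM context writes above always go through message-bus
 * lane 0, whereas the vendor specific register writes below target both
 * message-bus lanes when more than two TX lanes are in use.
 */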
2350 /* 4. Program custom width to match the link protocol */
2351 intel_cx0_rmw(i915, encoder->port, lane, PHY_C20_VDR_CUSTOM_WIDTH,
2352 PHY_C20_CUSTOM_WIDTH_MASK,
2353 PHY_C20_CUSTOM_WIDTH(intel_get_c20_custom_width(pll_state->clock, dp)),
2354 MB_WRITE_COMMITTED);
2356 /* 5. For DP or 6. For HDMI */
2358 intel_cx0_rmw(i915, encoder->port, lane, PHY_C20_VDR_CUSTOM_SERDES_RATE,
2359 BIT(6) | PHY_C20_CUSTOM_SERDES_MASK,
2360 BIT(6) | PHY_C20_CUSTOM_SERDES(intel_c20_get_dp_rate(pll_state->clock)),
2361 MB_WRITE_COMMITTED);
2363 intel_cx0_rmw(i915, encoder->port, lane, PHY_C20_VDR_CUSTOM_SERDES_RATE,
2364 BIT(7) | PHY_C20_CUSTOM_SERDES_MASK,
2365 is_hdmi_frl(pll_state->clock) ? BIT(7) : 0,
2366 MB_WRITE_COMMITTED);
2368 intel_cx0_write(i915, encoder->port, INTEL_CX0_BOTH_LANES, PHY_C20_VDR_HDMI_RATE,
2369 intel_c20_get_hdmi_rate(pll_state->clock),
2370 MB_WRITE_COMMITTED);
2374 * 7. Toggle the context-select bit in the vendor specific SERDES_RATE
2375 * register so the PHY loads the context (A or B) that was just written.
2377 intel_cx0_rmw(i915, encoder->port, lane, PHY_C20_VDR_CUSTOM_SERDES_RATE,
2378 BIT(0), cntx ? 0 : 1, MB_WRITE_COMMITTED);
2381 int intel_c10pll_calc_port_clock(struct intel_encoder *encoder,
2382 const struct intel_c10pll_state *pll_state)
2384 unsigned int frac_quot = 0, frac_rem = 0, frac_den = 1;
2385 unsigned int multiplier, tx_clk_div, hdmi_div, refclk = 38400;
2388 if (pll_state->pll[0] & C10_PLL0_FRACEN) {
2389 frac_quot = pll_state->pll[12] << 8 | pll_state->pll[11];
2390 frac_rem = pll_state->pll[14] << 8 | pll_state->pll[13];
2391 frac_den = pll_state->pll[10] << 8 | pll_state->pll[9];
2394 multiplier = (REG_FIELD_GET8(C10_PLL3_MULTIPLIERH_MASK, pll_state->pll[3]) << 8 |
2395 pll_state->pll[2]) / 2 + 16;
2397 tx_clk_div = REG_FIELD_GET8(C10_PLL15_TXCLKDIV_MASK, pll_state->pll[15]);
2398 hdmi_div = REG_FIELD_GET8(C10_PLL15_HDMIDIV_MASK, pll_state->pll[15]);
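/*
 * Port clock (kHz) = refclk * (multiplier + frac) / (10 * 2^tx_clk_div),
 * with frac = (frac_quot + frac_rem / frac_den) / 2^16, and the result
 * doubled when the HDMI divider is enabled.
 */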
2400 tmpclk = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(refclk, (multiplier << 16) + frac_quot) +
2401 DIV_ROUND_CLOSEST(refclk * frac_rem, frac_den),
2402 10 << (tx_clk_div + 16));
2403 tmpclk *= (hdmi_div ? 2 : 1);
2408 int intel_c20pll_calc_port_clock(struct intel_encoder *encoder,
2409 const struct intel_c20pll_state *pll_state)
2411 unsigned int frac, frac_en, frac_quot, frac_rem, frac_den;
2412 unsigned int multiplier, refclk = 38400;
2413 unsigned int tx_clk_div;
2414 unsigned int ref_clk_mpllb_div;
2415 unsigned int fb_clk_div4_en;
2416 unsigned int ref, vco;
2417 unsigned int tx_rate_mult;
2418 unsigned int tx_rate = REG_FIELD_GET(C20_PHY_TX_RATE, pll_state->tx[0]);
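/*
 * MPLLA and MPLLB expose the same divider fields, just at different
 * register offsets. Pick the active PLL, compute
 * vco = ref * (multiplier / 4 + frac / 2^17) / 10, then scale the VCO
 * by the TX rate multiplier and the TX rate/clock dividers below.
 */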
2420 if (pll_state->tx[0] & C20_PHY_USE_MPLLB) {
2422 frac_en = REG_FIELD_GET(C20_MPLLB_FRACEN, pll_state->mpllb[6]);
2423 frac_quot = pll_state->mpllb[8];
2424 frac_rem = pll_state->mpllb[9];
2425 frac_den = pll_state->mpllb[7];
2426 multiplier = REG_FIELD_GET(C20_MULTIPLIER_MASK, pll_state->mpllb[0]);
2427 tx_clk_div = REG_FIELD_GET(C20_MPLLB_TX_CLK_DIV_MASK, pll_state->mpllb[0]);
2428 ref_clk_mpllb_div = REG_FIELD_GET(C20_REF_CLK_MPLLB_DIV_MASK, pll_state->mpllb[6]);
2432 frac_en = REG_FIELD_GET(C20_MPLLA_FRACEN, pll_state->mplla[6]);
2433 frac_quot = pll_state->mplla[8];
2434 frac_rem = pll_state->mplla[9];
2435 frac_den = pll_state->mplla[7];
2436 multiplier = REG_FIELD_GET(C20_MULTIPLIER_MASK, pll_state->mplla[0]);
2437 tx_clk_div = REG_FIELD_GET(C20_MPLLA_TX_CLK_DIV_MASK, pll_state->mplla[1]);
2438 ref_clk_mpllb_div = REG_FIELD_GET(C20_REF_CLK_MPLLB_DIV_MASK, pll_state->mplla[6]);
2439 fb_clk_div4_en = REG_FIELD_GET(C20_FB_CLK_DIV4_EN, pll_state->mplla[0]);
2443 frac = frac_quot + DIV_ROUND_CLOSEST(frac_rem, frac_den);
2447 ref = DIV_ROUND_CLOSEST(refclk * (1 << (1 + fb_clk_div4_en)), 1 << ref_clk_mpllb_div);
2448 vco = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(ref, (multiplier << (17 - 2)) + frac) >> 17, 10);
2450 return vco << tx_rate_mult >> tx_clk_div >> tx_rate;
2453 static void intel_program_port_clock_ctl(struct intel_encoder *encoder,
2454 const struct intel_crtc_state *crtc_state,
2457 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2460 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL1(encoder->port), XELPDP_PORT_REVERSAL,
2461 lane_reversal ? XELPDP_PORT_REVERSAL : 0);
2464 val |= XELPDP_LANE1_PHY_CLOCK_SELECT;
2466 val |= XELPDP_FORWARD_CLOCK_UNGATE;
2468 if (is_hdmi_frl(crtc_state->port_clock))
2469 val |= XELPDP_DDI_CLOCK_SELECT(XELPDP_DDI_CLOCK_SELECT_DIV18CLK);
2471 val |= XELPDP_DDI_CLOCK_SELECT(XELPDP_DDI_CLOCK_SELECT_MAXPCLK);
2473 /* TODO: HDMI FRL */
2474 /* DP2.0 10G and 20G rates enable MPLLA */
2475 if (crtc_state->port_clock == 1000000 || crtc_state->port_clock == 2000000)
2476 val |= crtc_state->cx0pll_state.ssc_enabled ? XELPDP_SSC_ENABLE_PLLA : 0;
2478 val |= crtc_state->cx0pll_state.ssc_enabled ? XELPDP_SSC_ENABLE_PLLB : 0;
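/*
 * At most one SSC request (for the PLL actually in use) is set in val;
 * the rmw mask below covers both PLLA and PLLB requests, so the unused
 * one is cleared at the same time.
 */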
2480 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2481 XELPDP_LANE1_PHY_CLOCK_SELECT | XELPDP_FORWARD_CLOCK_UNGATE |
2482 XELPDP_DDI_CLOCK_SELECT_MASK | XELPDP_SSC_ENABLE_PLLA |
2483 XELPDP_SSC_ENABLE_PLLB, val);
2486 static u32 intel_cx0_get_powerdown_update(u8 lane_mask)
2491 for_each_cx0_lane_in_mask(lane_mask, lane)
2492 val |= XELPDP_LANE_POWERDOWN_UPDATE(lane);
2497 static u32 intel_cx0_get_powerdown_state(u8 lane_mask, u8 state)
2502 for_each_cx0_lane_in_mask(lane_mask, lane)
2503 val |= XELPDP_LANE_POWERDOWN_NEW_STATE(lane, state);
2508 static void intel_cx0_powerdown_change_sequence(struct drm_i915_private *i915,
2510 u8 lane_mask, u8 state)
2512 enum phy phy = intel_port_to_phy(i915, port);
2515 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL2(port),
2516 intel_cx0_get_powerdown_state(INTEL_CX0_BOTH_LANES, XELPDP_LANE_POWERDOWN_NEW_STATE_MASK),
2517 intel_cx0_get_powerdown_state(lane_mask, state));
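/*
 * With the new power state programmed above, drain any pending
 * message-bus transaction, then set the powerdown update bits and wait
 * for the hardware to clear them.
 */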
2519 /* Wait for pending transactions. */
2520 for_each_cx0_lane_in_mask(lane_mask, lane)
2521 if (intel_de_wait_for_clear(i915, XELPDP_PORT_M2P_MSGBUS_CTL(port, lane),
2522 XELPDP_PORT_M2P_TRANSACTION_PENDING,
2523 XELPDP_MSGBUS_TIMEOUT_SLOW)) {
2524 drm_dbg_kms(&i915->drm,
2525 "PHY %c Timeout waiting for previous transaction to complete. Reset the bus.\n",
2527 intel_cx0_bus_reset(i915, port, lane);
2530 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL2(port),
2531 intel_cx0_get_powerdown_update(INTEL_CX0_BOTH_LANES),
2532 intel_cx0_get_powerdown_update(lane_mask));
2534 /* Poll for the hardware to clear the powerdown update bits. */
2535 if (__intel_de_wait_for_register(i915, XELPDP_PORT_BUF_CTL2(port),
2536 intel_cx0_get_powerdown_update(lane_mask), 0,
2537 XELPDP_PORT_POWERDOWN_UPDATE_TIMEOUT_US, 0, NULL))
2538 drm_warn(&i915->drm, "PHY %c failed to update the powerdown state after %dus.\n",
2539 phy_name(phy), XELPDP_PORT_POWERDOWN_UPDATE_TIMEOUT_US);
2542 static void intel_cx0_setup_powerdown(struct drm_i915_private *i915, enum port port)
2544 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL2(port),
2545 XELPDP_POWER_STATE_READY_MASK,
2546 XELPDP_POWER_STATE_READY(CX0_P2_STATE_READY));
2547 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL3(port),
2548 XELPDP_POWER_STATE_ACTIVE_MASK |
2549 XELPDP_PLL_LANE_STAGGERING_DELAY_MASK,
2550 XELPDP_POWER_STATE_ACTIVE(CX0_P0_STATE_ACTIVE) |
2551 XELPDP_PLL_LANE_STAGGERING_DELAY(0));
2554 static u32 intel_cx0_get_pclk_refclk_request(u8 lane_mask)
2559 for_each_cx0_lane_in_mask(lane_mask, lane)
2560 val |= XELPDP_LANE_PCLK_REFCLK_REQUEST(lane);
2565 static u32 intel_cx0_get_pclk_refclk_ack(u8 lane_mask)
2570 for_each_cx0_lane_in_mask(lane_mask, lane)
2571 val |= XELPDP_LANE_PCLK_REFCLK_ACK(lane);
2576 static void intel_cx0_phy_lane_reset(struct drm_i915_private *i915,
2577 struct intel_encoder *encoder,
2580 enum port port = encoder->port;
2581 enum phy phy = intel_port_to_phy(i915, port);
2582 u8 owned_lane_mask = intel_cx0_get_owned_lane_mask(i915, encoder);
2583 u8 lane_mask = lane_reversal ? INTEL_CX0_LANE1 : INTEL_CX0_LANE0;
2584 u32 lane_pipe_reset = owned_lane_mask == INTEL_CX0_BOTH_LANES
2585 ? XELPDP_LANE_PIPE_RESET(0) | XELPDP_LANE_PIPE_RESET(1)
2586 : XELPDP_LANE_PIPE_RESET(0);
2587 u32 lane_phy_current_status = owned_lane_mask == INTEL_CX0_BOTH_LANES
2588 ? (XELPDP_LANE_PHY_CURRENT_STATUS(0) |
2589 XELPDP_LANE_PHY_CURRENT_STATUS(1))
2590 : XELPDP_LANE_PHY_CURRENT_STATUS(0);
2592 if (__intel_de_wait_for_register(i915, XELPDP_PORT_BUF_CTL1(port),
2593 XELPDP_PORT_BUF_SOC_PHY_READY,
2594 XELPDP_PORT_BUF_SOC_PHY_READY,
2595 XELPDP_PORT_BUF_SOC_READY_TIMEOUT_US, 0, NULL))
2596 drm_warn(&i915->drm, "PHY %c failed to bring out of SOC reset after %dus.\n",
2597 phy_name(phy), XELPDP_PORT_BUF_SOC_READY_TIMEOUT_US);
2599 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL2(port), lane_pipe_reset,
2602 if (__intel_de_wait_for_register(i915, XELPDP_PORT_BUF_CTL2(port),
2603 lane_phy_current_status, lane_phy_current_status,
2604 XELPDP_PORT_RESET_START_TIMEOUT_US, 0, NULL))
2605 drm_warn(&i915->drm, "PHY %c failed to bring out of Lane reset after %dus.\n",
2606 phy_name(phy), XELPDP_PORT_RESET_START_TIMEOUT_US);
2608 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(port),
2609 intel_cx0_get_pclk_refclk_request(owned_lane_mask),
2610 intel_cx0_get_pclk_refclk_request(lane_mask));
2612 if (__intel_de_wait_for_register(i915, XELPDP_PORT_CLOCK_CTL(port),
2613 intel_cx0_get_pclk_refclk_ack(owned_lane_mask),
2614 intel_cx0_get_pclk_refclk_ack(lane_mask),
2615 XELPDP_REFCLK_ENABLE_TIMEOUT_US, 0, NULL))
2616 drm_warn(&i915->drm, "PHY %c failed to request refclk after %dus.\n",
2617 phy_name(phy), XELPDP_REFCLK_ENABLE_TIMEOUT_US);
2619 intel_cx0_powerdown_change_sequence(i915, port, INTEL_CX0_BOTH_LANES,
2620 CX0_P2_STATE_RESET);
2621 intel_cx0_setup_powerdown(i915, port);
2623 intel_de_rmw(i915, XELPDP_PORT_BUF_CTL2(port), lane_pipe_reset, 0);
2625 if (intel_de_wait_for_clear(i915, XELPDP_PORT_BUF_CTL2(port), lane_phy_current_status,
2626 XELPDP_PORT_RESET_END_TIMEOUT))
2627 drm_warn(&i915->drm, "PHY %c failed to bring out of Lane reset after %dms.\n",
2628 phy_name(phy), XELPDP_PORT_RESET_END_TIMEOUT);
2631 static void intel_cx0_program_phy_lane(struct drm_i915_private *i915,
2632 struct intel_encoder *encoder, int lane_count,
2637 bool dp_alt_mode = intel_tc_port_in_dp_alt_mode(enc_to_dig_port(encoder));
2638 u8 owned_lane_mask = intel_cx0_get_owned_lane_mask(i915, encoder);
2639 enum port port = encoder->port;
2641 if (intel_is_c10phy(i915, intel_port_to_phy(i915, port)))
2642 intel_cx0_rmw(i915, port, owned_lane_mask,
2643 PHY_C10_VDR_CONTROL(1), 0,
2644 C10_VDR_CTRL_MSGBUS_ACCESS,
2645 MB_WRITE_COMMITTED);
2648 disables = REG_GENMASK8(3, 0) >> lane_count;
2650 disables = REG_GENMASK8(3, 0) << lane_count;
2652 if (dp_alt_mode && lane_count == 1) {
2653 disables &= ~REG_GENMASK8(1, 0);
2654 disables |= REG_FIELD_PREP8(REG_GENMASK8(1, 0), 0x1);
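/*
 * Each bit of "disables" maps to one of the four TX units (two per
 * message-bus lane, lane 0 in the low bits). Lane reversal picks the
 * enabled TXes from the opposite end of the mask, and the DP-alt x1
 * special case above keeps only the second TX of PHY lane 0 enabled
 * (bit 1 clear, bit 0 set).
 */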
2657 for (i = 0; i < 4; i++) {
2659 u8 lane_mask = i < 2 ? INTEL_CX0_LANE0 : INTEL_CX0_LANE1;
2661 if (!(owned_lane_mask & lane_mask))
2664 intel_cx0_rmw(i915, port, lane_mask, PHY_CX0_TX_CONTROL(tx, 2),
2665 CONTROL2_DISABLE_SINGLE_TX,
2666 disables & BIT(i) ? CONTROL2_DISABLE_SINGLE_TX : 0,
2667 MB_WRITE_COMMITTED);
2670 if (intel_is_c10phy(i915, intel_port_to_phy(i915, port)))
2671 intel_cx0_rmw(i915, port, owned_lane_mask,
2672 PHY_C10_VDR_CONTROL(1), 0,
2673 C10_VDR_CTRL_UPDATE_CFG,
2674 MB_WRITE_COMMITTED);
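/*
 * For C10 the TX control writes are bracketed by VDR_CONTROL updates:
 * MSGBUS_ACCESS is set before touching the registers and UPDATE_CFG
 * afterwards, which presumably makes the PHY latch the new per-TX
 * enable configuration.
 */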
2677 static u32 intel_cx0_get_pclk_pll_request(u8 lane_mask)
2682 for_each_cx0_lane_in_mask(lane_mask, lane)
2683 val |= XELPDP_LANE_PCLK_PLL_REQUEST(lane);
2688 static u32 intel_cx0_get_pclk_pll_ack(u8 lane_mask)
2693 for_each_cx0_lane_in_mask(lane_mask, lane)
2694 val |= XELPDP_LANE_PCLK_PLL_ACK(lane);
2699 static void intel_cx0pll_enable(struct intel_encoder *encoder,
2700 const struct intel_crtc_state *crtc_state)
2702 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2703 enum phy phy = intel_port_to_phy(i915, encoder->port);
2704 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2705 bool lane_reversal = dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;
2706 u8 maxpclk_lane = lane_reversal ? INTEL_CX0_LANE1 :
2708 intel_wakeref_t wakeref = intel_cx0_phy_transaction_begin(encoder);
2711 * 1. Program PORT_CLOCK_CTL REGISTER to configure
2712 * clock muxes, gating and SSC
2714 intel_program_port_clock_ctl(encoder, crtc_state, lane_reversal);
2716 /* 2. Bring PHY out of reset. */
2717 intel_cx0_phy_lane_reset(i915, encoder, lane_reversal);
2720 * 3. Change Phy power state to Ready.
2721 * TODO: For DP alt mode use only one lane.
2723 intel_cx0_powerdown_change_sequence(i915, encoder->port, INTEL_CX0_BOTH_LANES,
2724 CX0_P2_STATE_READY);
2727 * 4. Program PORT_MSGBUS_TIMER register's Message Bus Timer field to 0xA000.
2728 * (This is done inside intel_cx0_phy_transaction_begin(), since we would need
2729 * the right timer thresholds for readouts too.)
2732 /* 5. Program PHY internal PLL internal registers. */
2733 if (intel_is_c10phy(i915, phy))
2734 intel_c10_pll_program(i915, crtc_state, encoder);
2736 intel_c20_pll_program(i915, crtc_state, encoder);
2739 * 6. Program the enabled and disabled owned PHY lane
2740 * transmitters over message bus
2742 intel_cx0_program_phy_lane(i915, encoder, crtc_state->lane_count, lane_reversal);
2745 * 7. Follow the Display Voltage Frequency Switching - Sequence
2746 * Before Frequency Change. We handle this step in bxt_set_cdclk().
2750 * 8. Program DDI_CLK_VALFREQ to match intended DDI
2753 intel_de_write(i915, DDI_CLK_VALFREQ(encoder->port),
2754 crtc_state->port_clock);
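/*
 * The PLL request/ack handshake below is done on the lane driving
 * maxPCLK: PHY lane 1 when the port lanes are reversed, lane 0
 * otherwise.
 */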
2757 * 9. Set PORT_CLOCK_CTL register PCLK PLL Request
2758 * LN<Lane for maxPCLK> to "1" to enable PLL.
2760 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2761 intel_cx0_get_pclk_pll_request(INTEL_CX0_BOTH_LANES),
2762 intel_cx0_get_pclk_pll_request(maxpclk_lane));
2764 /* 10. Poll on PORT_CLOCK_CTL PCLK PLL Ack LN<Lane for maxPCLK> == "1". */
2765 if (__intel_de_wait_for_register(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2766 intel_cx0_get_pclk_pll_ack(INTEL_CX0_BOTH_LANES),
2767 intel_cx0_get_pclk_pll_ack(maxpclk_lane),
2768 XELPDP_PCLK_PLL_ENABLE_TIMEOUT_US, 0, NULL))
2769 drm_warn(&i915->drm, "PHY %c PLL not locked after %dus.\n",
2770 phy_name(phy), XELPDP_PCLK_PLL_ENABLE_TIMEOUT_US);
2773 * 11. Follow the Display Voltage Frequency Switching Sequence After
2774 * Frequency Change. We handle this step in bxt_set_cdclk().
2777 /* TODO: enable TBT-ALT mode */
2778 intel_cx0_phy_transaction_end(encoder, wakeref);
2781 int intel_mtl_tbt_calc_port_clock(struct intel_encoder *encoder)
2783 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2785 u32 val = intel_de_read(i915, XELPDP_PORT_CLOCK_CTL(encoder->port));
2787 clock = REG_FIELD_GET(XELPDP_DDI_CLOCK_SELECT_MASK, val);
2789 drm_WARN_ON(&i915->drm, !(val & XELPDP_FORWARD_CLOCK_UNGATE));
2790 drm_WARN_ON(&i915->drm, !(val & XELPDP_TBT_CLOCK_REQUEST));
2791 drm_WARN_ON(&i915->drm, !(val & XELPDP_TBT_CLOCK_ACK));
2794 case XELPDP_DDI_CLOCK_SELECT_TBT_162:
2796 case XELPDP_DDI_CLOCK_SELECT_TBT_270:
2798 case XELPDP_DDI_CLOCK_SELECT_TBT_540:
2800 case XELPDP_DDI_CLOCK_SELECT_TBT_810:
2803 MISSING_CASE(clock);
2808 static int intel_mtl_tbt_clock_select(struct drm_i915_private *i915, int clock)
2812 return XELPDP_DDI_CLOCK_SELECT_TBT_162;
2814 return XELPDP_DDI_CLOCK_SELECT_TBT_270;
2816 return XELPDP_DDI_CLOCK_SELECT_TBT_540;
2818 return XELPDP_DDI_CLOCK_SELECT_TBT_810;
2820 MISSING_CASE(clock);
2821 return XELPDP_DDI_CLOCK_SELECT_TBT_162;
2825 static void intel_mtl_tbt_pll_enable(struct intel_encoder *encoder,
2826 const struct intel_crtc_state *crtc_state)
2828 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2829 enum phy phy = intel_port_to_phy(i915, encoder->port);
2833 * 1. Program PORT_CLOCK_CTL REGISTER to configure
2834 * clock muxes, gating and SSC
2836 val |= XELPDP_DDI_CLOCK_SELECT(intel_mtl_tbt_clock_select(i915, crtc_state->port_clock));
2837 val |= XELPDP_FORWARD_CLOCK_UNGATE;
2838 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2839 XELPDP_DDI_CLOCK_SELECT_MASK | XELPDP_FORWARD_CLOCK_UNGATE, val);
2841 /* 2. Read back PORT_CLOCK_CTL REGISTER */
2842 val = intel_de_read(i915, XELPDP_PORT_CLOCK_CTL(encoder->port));
2845 * 3. Follow the Display Voltage Frequency Switching - Sequence
2846 * Before Frequency Change. We handle this step in bxt_set_cdclk().
2850 * 4. Set PORT_CLOCK_CTL register TBT CLOCK Request to "1" to enable PLL.
2852 val |= XELPDP_TBT_CLOCK_REQUEST;
2853 intel_de_write(i915, XELPDP_PORT_CLOCK_CTL(encoder->port), val);
2855 /* 5. Poll on PORT_CLOCK_CTL TBT CLOCK Ack == "1". */
2856 if (__intel_de_wait_for_register(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2857 XELPDP_TBT_CLOCK_ACK,
2858 XELPDP_TBT_CLOCK_ACK,
2860 drm_warn(&i915->drm, "[ENCODER:%d:%s][%c] PHY PLL not locked after 100us.\n",
2861 encoder->base.base.id, encoder->base.name, phy_name(phy));
2864 * 6. Follow the Display Voltage Frequency Switching Sequence After
2865 * Frequency Change. We handle this step in bxt_set_cdclk().
2869 * 7. Program DDI_CLK_VALFREQ to match intended DDI
2872 intel_de_write(i915, DDI_CLK_VALFREQ(encoder->port),
2873 crtc_state->port_clock);
2876 void intel_mtl_pll_enable(struct intel_encoder *encoder,
2877 const struct intel_crtc_state *crtc_state)
2879 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2881 if (intel_tc_port_in_tbt_alt_mode(dig_port))
2882 intel_mtl_tbt_pll_enable(encoder, crtc_state);
2884 intel_cx0pll_enable(encoder, crtc_state);
2887 static void intel_cx0pll_disable(struct intel_encoder *encoder)
2889 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2890 enum phy phy = intel_port_to_phy(i915, encoder->port);
2891 bool is_c10 = intel_is_c10phy(i915, phy);
2892 intel_wakeref_t wakeref = intel_cx0_phy_transaction_begin(encoder);
2894 /* 1. Change owned PHY lane power to Disable state. */
2895 intel_cx0_powerdown_change_sequence(i915, encoder->port, INTEL_CX0_BOTH_LANES,
2896 is_c10 ? CX0_P2PG_STATE_DISABLE :
2897 CX0_P4PG_STATE_DISABLE);
2900 * 2. Follow the Display Voltage Frequency Switching Sequence Before
2901 * Frequency Change. We handle this step in bxt_set_cdclk().
2905 * 3. Set PORT_CLOCK_CTL register PCLK PLL Request LN<Lane for maxPCLK>
2906 * to "0" to disable PLL.
2908 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2909 intel_cx0_get_pclk_pll_request(INTEL_CX0_BOTH_LANES) |
2910 intel_cx0_get_pclk_refclk_request(INTEL_CX0_BOTH_LANES), 0);
2912 /* 4. Program DDI_CLK_VALFREQ to 0. */
2913 intel_de_write(i915, DDI_CLK_VALFREQ(encoder->port), 0);
2916 * 5. Poll on PORT_CLOCK_CTL PCLK PLL Ack LN<Lane for maxPCLK> == "0".
2918 if (__intel_de_wait_for_register(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2919 intel_cx0_get_pclk_pll_ack(INTEL_CX0_BOTH_LANES) |
2920 intel_cx0_get_pclk_refclk_ack(INTEL_CX0_BOTH_LANES), 0,
2921 XELPDP_PCLK_PLL_DISABLE_TIMEOUT_US, 0, NULL))
2922 drm_warn(&i915->drm, "PHY %c PLL not unlocked after %dus.\n",
2923 phy_name(phy), XELPDP_PCLK_PLL_DISABLE_TIMEOUT_US);
2926 * 6. Follow the Display Voltage Frequency Switching Sequence After
2927 * Frequency Change. We handle this step in bxt_set_cdclk().
2930 /* 7. Program PORT_CLOCK_CTL register to disable and gate clocks. */
2931 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2932 XELPDP_DDI_CLOCK_SELECT_MASK, 0);
2933 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2934 XELPDP_FORWARD_CLOCK_UNGATE, 0);
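/*
 * The clock select is cleared in a separate write before the forward
 * clock is gated, presumably so the mux is parked before gating.
 */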
2936 intel_cx0_phy_transaction_end(encoder, wakeref);
2939 static void intel_mtl_tbt_pll_disable(struct intel_encoder *encoder)
2941 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2942 enum phy phy = intel_port_to_phy(i915, encoder->port);
2945 * 1. Follow the Display Voltage Frequency Switching Sequence Before
2946 * Frequency Change. We handle this step in bxt_set_cdclk().
2950 * 2. Set PORT_CLOCK_CTL register TBT CLOCK Request to "0" to disable PLL.
2952 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2953 XELPDP_TBT_CLOCK_REQUEST, 0);
2955 /* 3. Poll on PORT_CLOCK_CTL TBT CLOCK Ack == "0". */
2956 if (__intel_de_wait_for_register(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2957 XELPDP_TBT_CLOCK_ACK, 0, 10, 0, NULL))
2958 drm_warn(&i915->drm, "[ENCODER:%d:%s][%c] PHY PLL not unlocked after 10us.\n",
2959 encoder->base.base.id, encoder->base.name, phy_name(phy));
2962 * 4. Follow the Display Voltage Frequency Switching Sequence After
2963 * Frequency Change. We handle this step in bxt_set_cdclk().
2967 * 5. Program PORT_CLOCK_CTL register to disable and gate clocks
2969 intel_de_rmw(i915, XELPDP_PORT_CLOCK_CTL(encoder->port),
2970 XELPDP_DDI_CLOCK_SELECT_MASK |
2971 XELPDP_FORWARD_CLOCK_UNGATE, 0);
2973 /* 6. Program DDI_CLK_VALFREQ to 0. */
2974 intel_de_write(i915, DDI_CLK_VALFREQ(encoder->port), 0);
2977 void intel_mtl_pll_disable(struct intel_encoder *encoder)
2979 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2981 if (intel_tc_port_in_tbt_alt_mode(dig_port))
2982 intel_mtl_tbt_pll_disable(encoder);
2984 intel_cx0pll_disable(encoder);
2987 enum icl_port_dpll_id
2988 intel_mtl_port_pll_type(struct intel_encoder *encoder,
2989 const struct intel_crtc_state *crtc_state)
2991 struct drm_i915_private *i915 = to_i915(encoder->base.dev);
2993 * TODO: Determine the PLL type from the SW state, once MTL PLL
2994 * handling is done via the standard shared DPLL framework.
2996 u32 val = intel_de_read(i915, XELPDP_PORT_CLOCK_CTL(encoder->port));
2997 u32 clock = REG_FIELD_GET(XELPDP_DDI_CLOCK_SELECT_MASK, val);
2999 if (clock == XELPDP_DDI_CLOCK_SELECT_MAXPCLK ||
3000 clock == XELPDP_DDI_CLOCK_SELECT_DIV18CLK)
3001 return ICL_PORT_DPLL_MG_PHY;
3003 return ICL_PORT_DPLL_DEFAULT;
3006 void intel_c10pll_state_verify(struct intel_atomic_state *state,
3007 struct intel_crtc *crtc)
3009 struct drm_i915_private *i915 = to_i915(state->base.dev);
3010 const struct intel_crtc_state *new_crtc_state =
3011 intel_atomic_get_new_crtc_state(state, crtc);
3012 struct intel_c10pll_state mpllb_hw_state = {};
3013 const struct intel_c10pll_state *mpllb_sw_state = &new_crtc_state->cx0pll_state.c10;
3014 struct intel_encoder *encoder;
3018 if (DISPLAY_VER(i915) < 14)
3021 if (!new_crtc_state->hw.active)
3024 /* intel_get_crtc_new_encoder() only works for modeset/fastset commits */
3025 if (!intel_crtc_needs_modeset(new_crtc_state) &&
3026 !intel_crtc_needs_fastset(new_crtc_state))
3029 encoder = intel_get_crtc_new_encoder(state, new_crtc_state);
3030 phy = intel_port_to_phy(i915, encoder->port);
3032 if (!intel_is_c10phy(i915, phy))
3035 intel_c10pll_readout_hw_state(encoder, &mpllb_hw_state);
3037 for (i = 0; i < ARRAY_SIZE(mpllb_sw_state->pll); i++) {
3038 u8 expected = mpllb_sw_state->pll[i];
3040 I915_STATE_WARN(i915, mpllb_hw_state.pll[i] != expected,
3041 "[CRTC:%d:%s] mismatch in C10MPLLB: Register[%d] (expected 0x%02x, found 0x%02x)",
3042 crtc->base.base.id, crtc->base.name, i,
3043 expected, mpllb_hw_state.pll[i]);
3046 I915_STATE_WARN(i915, mpllb_hw_state.tx != mpllb_sw_state->tx,
3047 "[CRTC:%d:%s] mismatch in C10MPLLB: Register TX0 (expected 0x%02x, found 0x%02x)",
3048 crtc->base.base.id, crtc->base.name,
3049 mpllb_sw_state->tx, mpllb_hw_state.tx);
3051 I915_STATE_WARN(i915, mpllb_hw_state.cmn != mpllb_sw_state->cmn,
3052 "[CRTC:%d:%s] mismatch in C10MPLLB: Register CMN0 (expected 0x%02x, found 0x%02x)",
3053 crtc->base.base.id, crtc->base.name,
3054 mpllb_sw_state->cmn, mpllb_hw_state.cmn);