/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_DE_H__
#define __INTEL_DE_H__

#include "intel_display_conversion.h"
#include "intel_display_core.h"
#include "intel_dmc_wl.h"
#include "intel_dsb.h"
#include "intel_uncore.h"
#include "intel_uncore_trace.h"
static inline struct intel_uncore *__to_uncore(struct intel_display *display)
{
	return to_intel_uncore(display->drm);
}
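/*
 * The register helpers below resolve the uncore through the display's drm
 * device, so callers only need a struct intel_display pointer.
 */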
static inline u32
__intel_de_read(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	intel_dmc_wl_get(display, reg);
	val = intel_uncore_read(__to_uncore(display), reg);
	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_read(p,...) __intel_de_read(__to_intel_display(p), __VA_ARGS__)
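/*
 * Illustrative usage sketch, not part of this header's API: the read is
 * bracketed by a DMC wakelock get/put, and __to_intel_display() converts
 * whatever pointer type the caller has. The register name below is a
 * placeholder.
 *
 *	u32 tmp = intel_de_read(display, HYPOTHETICAL_REG);
 */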
static inline u8
intel_de_read8(struct intel_display *display, i915_reg_t reg)
{
	u8 val;

	intel_dmc_wl_get(display, reg);
	val = intel_uncore_read8(__to_uncore(display), reg);
	intel_dmc_wl_put(display, reg);

	return val;
}
static inline u64
intel_de_read64_2x32(struct intel_display *display,
		     i915_reg_t lower_reg, i915_reg_t upper_reg)
{
	u64 val;

	intel_dmc_wl_get(display, lower_reg);
	intel_dmc_wl_get(display, upper_reg);

	val = intel_uncore_read64_2x32(__to_uncore(display), lower_reg,
				       upper_reg);

	intel_dmc_wl_put(display, upper_reg);
	intel_dmc_wl_put(display, lower_reg);

	return val;
}
static inline void
__intel_de_posting_read(struct intel_display *display, i915_reg_t reg)
{
	intel_dmc_wl_get(display, reg);
	intel_uncore_posting_read(__to_uncore(display), reg);
	intel_dmc_wl_put(display, reg);
}
#define intel_de_posting_read(p,...) __intel_de_posting_read(__to_intel_display(p), __VA_ARGS__)
static inline void
__intel_de_write(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_dmc_wl_get(display, reg);
	intel_uncore_write(__to_uncore(display), reg, val);
	intel_dmc_wl_put(display, reg);
}
#define intel_de_write(p,...) __intel_de_write(__to_intel_display(p), __VA_ARGS__)
static inline u32
__intel_de_rmw_nowl(struct intel_display *display, i915_reg_t reg,
		    u32 clear, u32 set)
{
	return intel_uncore_rmw(__to_uncore(display), reg, clear, set);
}
static inline u32
__intel_de_rmw(struct intel_display *display, i915_reg_t reg, u32 clear,
	       u32 set)
{
	u32 val;

	intel_dmc_wl_get(display, reg);
	val = __intel_de_rmw_nowl(display, reg, clear, set);
	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_rmw(p,...) __intel_de_rmw(__to_intel_display(p), __VA_ARGS__)
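/*
 * Illustrative usage sketch: a read-modify-write clears the bits in @clear,
 * sets the bits in @set and returns the old register value. The names below
 * are placeholders, not real registers or bits.
 *
 *	old = intel_de_rmw(display, HYPOTHETICAL_CTL_REG,
 *			   HYPOTHETICAL_MODE_MASK, HYPOTHETICAL_MODE_ENABLE);
 */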
static inline int
__intel_de_wait_for_register_nowl(struct intel_display *display,
				  i915_reg_t reg,
				  u32 mask, u32 value, unsigned int timeout)
{
	return intel_wait_for_register(__to_uncore(display), reg, mask,
				       value, timeout);
}
static inline int
__intel_de_wait_for_register_atomic_nowl(struct intel_display *display,
					  i915_reg_t reg,
					  u32 mask, u32 value,
					  unsigned int fast_timeout_us)
{
	return __intel_wait_for_register(__to_uncore(display), reg, mask,
					 value, fast_timeout_us, 0, NULL);
}
static inline int
intel_de_wait(struct intel_display *display, i915_reg_t reg,
	      u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);
	ret = __intel_de_wait_for_register_nowl(display, reg, mask, value,
						timeout);
	intel_dmc_wl_put(display, reg);

	return ret;
}
static inline int
intel_de_wait_fw(struct intel_display *display, i915_reg_t reg,
		 u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);
	ret = intel_wait_for_register_fw(__to_uncore(display), reg, mask,
					 value, timeout);
	intel_dmc_wl_put(display, reg);

	return ret;
}
static inline int
intel_de_wait_custom(struct intel_display *display, i915_reg_t reg,
		     u32 mask, u32 value,
		     unsigned int fast_timeout_us,
		     unsigned int slow_timeout_ms, u32 *out_value)
{
	int ret;

	intel_dmc_wl_get(display, reg);
	ret = __intel_wait_for_register(__to_uncore(display), reg, mask, value,
					fast_timeout_us, slow_timeout_ms,
					out_value);
	intel_dmc_wl_put(display, reg);

	return ret;
}
static inline int
__intel_de_wait_for_set(struct intel_display *display, i915_reg_t reg,
			u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, mask, timeout);
}
#define intel_de_wait_for_set(p,...) __intel_de_wait_for_set(__to_intel_display(p), __VA_ARGS__)
static inline int
__intel_de_wait_for_clear(struct intel_display *display, i915_reg_t reg,
			  u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, 0, timeout);
}
#define intel_de_wait_for_clear(p,...) __intel_de_wait_for_clear(__to_intel_display(p), __VA_ARGS__)
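/*
 * Illustrative usage sketch: the wait helpers poll until (reg & mask)
 * matches the expected value or the timeout (in ms) expires, returning 0
 * on success and a negative error code on timeout. Register and bit names
 * below are placeholders.
 *
 *	if (intel_de_wait_for_set(display, HYPOTHETICAL_STATUS_REG,
 *				  HYPOTHETICAL_READY_BIT, 10))
 *		drm_err(display->drm, "timed out waiting for ready\n");
 */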
/*
 * Unlocked mmio-accessors, think carefully before using these.
 *
 * Certain architectures will die if the same cacheline is concurrently accessed
 * by different clients (e.g. on Ivybridge). Access to registers should
 * therefore generally be serialised, by either the dev_priv->uncore.lock or
 * a more localised lock guarding all access to that bank of registers.
 */
static inline u32
__intel_de_read_fw(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	val = intel_uncore_read_fw(__to_uncore(display), reg);
	trace_i915_reg_rw(false, reg, val, sizeof(val), true);

	return val;
}
#define intel_de_read_fw(p,...) __intel_de_read_fw(__to_intel_display(p), __VA_ARGS__)
static inline void
__intel_de_write_fw(struct intel_display *display, i915_reg_t reg, u32 val)
{
	trace_i915_reg_rw(true, reg, val, sizeof(val), true);
	intel_uncore_write_fw(__to_uncore(display), reg, val);
}
#define intel_de_write_fw(p,...) __intel_de_write_fw(__to_intel_display(p), __VA_ARGS__)
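/*
 * Illustrative sketch of the serialisation asked for above: the _fw
 * accessors take no lock themselves, so the caller is expected to hold a
 * suitable lock (e.g. the uncore lock) around a group of accesses. Lock,
 * register and bit names below are placeholders.
 *
 *	spin_lock_irq(&uncore->lock);
 *	tmp = intel_de_read_fw(display, HYPOTHETICAL_REG);
 *	intel_de_write_fw(display, HYPOTHETICAL_REG, tmp | HYPOTHETICAL_BIT);
 *	spin_unlock_irq(&uncore->lock);
 */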
static inline u32
intel_de_read_notrace(struct intel_display *display, i915_reg_t reg)
{
	return intel_uncore_read_notrace(__to_uncore(display), reg);
}
static inline void
intel_de_write_notrace(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_uncore_write_notrace(__to_uncore(display), reg, val);
}
static __always_inline void
intel_de_write_dsb(struct intel_display *display, struct intel_dsb *dsb,
		   i915_reg_t reg, u32 val)
{
	if (dsb)
		intel_dsb_reg_write(dsb, reg, val);
	else
		intel_de_write_fw(display, reg, val);
}
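/*
 * Illustrative note: when a DSB context is supplied the write is queued
 * into the DSB command buffer via intel_dsb_reg_write(), otherwise it is
 * issued immediately as an unlocked MMIO write, so the same call site can
 * serve both paths:
 *
 *	intel_de_write_dsb(display, dsb, HYPOTHETICAL_REG, val);
 */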
#endif /* __INTEL_DE_H__ */