This source file includes following definitions.
- vlv_get_hpll_vco
- vlv_get_cck_clock
- vlv_get_cck_clock_hpll
- intel_update_czclk
- intel_fdi_link_freq
- skl_wa_827
- icl_wa_scalerclkgating
- needs_modeset
- pnv_calc_dpll_params
- i9xx_dpll_compute_m
- i9xx_calc_dpll_params
- vlv_calc_dpll_params
- chv_calc_dpll_params
- intel_PLL_is_valid
- i9xx_select_p2_div
- i9xx_find_best_dpll
- pnv_find_best_dpll
- g4x_find_best_dpll
- vlv_PLL_is_optimal
- vlv_find_best_dpll
- chv_find_best_dpll
- bxt_find_best_dpll
- intel_crtc_active
- intel_pipe_to_cpu_transcoder
- pipe_scanline_is_moving
- wait_for_pipe_scanline_moving
- intel_wait_for_pipe_scanline_stopped
- intel_wait_for_pipe_scanline_moving
- intel_wait_for_pipe_off
- assert_pll
- assert_dsi_pll
- assert_fdi_tx
- assert_fdi_rx
- assert_fdi_tx_pll_enabled
- assert_fdi_rx_pll
- assert_panel_unlocked
- assert_pipe
- assert_plane
- assert_planes_disabled
- assert_vblank_disabled
- assert_pch_transcoder_disabled
- assert_pch_dp_disabled
- assert_pch_hdmi_disabled
- assert_pch_ports_disabled
- _vlv_enable_pll
- vlv_enable_pll
- _chv_enable_pll
- chv_enable_pll
- i9xx_has_pps
- i9xx_enable_pll
- i9xx_disable_pll
- vlv_disable_pll
- chv_disable_pll
- vlv_wait_port_ready
- ironlake_enable_pch_transcoder
- lpt_enable_pch_transcoder
- ironlake_disable_pch_transcoder
- lpt_disable_pch_transcoder
- intel_crtc_pch_transcoder
- intel_crtc_max_vblank_count
- intel_crtc_vblank_on
- intel_enable_pipe
- intel_disable_pipe
- intel_tile_size
- intel_tile_width_bytes
- intel_tile_height
- intel_tile_dims
- intel_fb_align_height
- intel_rotation_info_size
- intel_remapped_info_size
- intel_fill_fb_ggtt_view
- intel_cursor_alignment
- intel_linear_alignment
- intel_surf_alignment
- intel_plane_uses_fence
- intel_pin_and_fence_fb_obj
- intel_unpin_fb_vma
- intel_fb_pitch
- intel_fb_xy_to_linear
- intel_add_fb_offsets
- intel_adjust_tile_offset
- is_surface_linear
- intel_adjust_aligned_offset
- intel_plane_adjust_aligned_offset
- intel_compute_aligned_offset
- intel_plane_compute_aligned_offset
- intel_fb_offset_to_xy
- intel_fb_modifier_to_tiling
- lookup_format_info
- intel_get_format_info
- is_ccs_modifier
- intel_plane_fb_max_stride
- intel_fb_max_stride
- intel_fb_stride_alignment
- intel_plane_can_remap
- intel_plane_needs_remap
- intel_fill_fb_info
- intel_plane_remap_gtt
- intel_plane_compute_gtt
- i9xx_format_to_fourcc
- skl_format_to_fourcc
- intel_alloc_initial_plane_obj
- intel_set_plane_visible
- fixup_active_planes
- intel_plane_disable_noatomic
- to_intel_frontbuffer
- intel_find_initial_plane_obj
- skl_max_plane_width
- glk_max_plane_width
- icl_max_plane_width
- skl_check_main_ccs_coordinates
- skl_check_main_surface
- skl_check_nv12_aux_surface
- skl_check_ccs_aux_surface
- skl_check_plane_surface
- i9xx_plane_max_stride
- i9xx_plane_ctl_crtc
- i9xx_plane_ctl
- i9xx_check_plane_surface
- i9xx_plane_has_windowing
- i9xx_plane_check
- i9xx_update_plane
- i9xx_disable_plane
- i9xx_plane_get_hw_state
- skl_detach_scaler
- skl_detach_scalers
- skl_plane_stride_mult
- skl_plane_stride
- skl_plane_ctl_format
- skl_plane_ctl_alpha
- glk_plane_color_ctl_alpha
- skl_plane_ctl_tiling
- skl_plane_ctl_rotate
- cnl_plane_ctl_flip
- skl_plane_ctl_crtc
- skl_plane_ctl
- glk_plane_color_ctl_crtc
- glk_plane_color_ctl
- __intel_display_resume
- gpu_reset_clobbers_display
- intel_prepare_reset
- intel_finish_reset
- icl_set_pipe_chicken
- intel_update_pipe_config
- intel_fdi_normal_train
- ironlake_fdi_link_train
- gen6_fdi_link_train
- ivb_manual_fdi_link_train
- ironlake_fdi_pll_enable
- ironlake_fdi_pll_disable
- ironlake_fdi_disable
- intel_has_pending_fb_unpin
- lpt_disable_iclkip
- lpt_program_iclkip
- lpt_get_iclkip
- ironlake_pch_transcoder_set_timings
- cpt_set_fdi_bc_bifurcation
- ivybridge_update_fdi_bc_bifurcation
- intel_get_crtc_new_encoder
- ironlake_pch_enable
- lpt_pch_enable
- cpt_verify_modeset
- skl_scaler_calc_phase
- skl_update_scaler
- skl_update_scaler_crtc
- skl_update_scaler_plane
- skylake_scaler_disable
- skylake_pfit_enable
- ironlake_pfit_enable
- hsw_enable_ips
- hsw_disable_ips
- intel_crtc_dpms_overlay_disable
- intel_post_enable_primary
- intel_pre_disable_primary_noatomic
- hsw_pre_update_disable_ips
- hsw_post_update_enable_ips
- needs_nv12_wa
- needs_scalerclk_wa
- intel_post_plane_update
- intel_pre_plane_update
- intel_crtc_disable_planes
- intel_connector_primary_encoder
- intel_connector_needs_modeset
- intel_encoders_update_prepare
- intel_encoders_update_complete
- intel_encoders_pre_pll_enable
- intel_encoders_pre_enable
- intel_encoders_enable
- intel_encoders_disable
- intel_encoders_post_disable
- intel_encoders_post_pll_disable
- intel_encoders_update_pipe
- intel_disable_primary_plane
- ironlake_crtc_enable
- hsw_crtc_supports_ips
- glk_pipe_scaler_clock_gating_wa
- icl_pipe_mbus_enable
- haswell_crtc_enable
- ironlake_pfit_disable
- ironlake_crtc_disable
- haswell_crtc_disable
- i9xx_pfit_enable
- intel_phy_is_combo
- intel_phy_is_tc
- intel_port_to_phy
- intel_port_to_tc
- intel_port_to_power_domain
- intel_aux_power_domain
- get_crtc_power_domains
- modeset_get_crtc_power_domains
- modeset_put_power_domains
- valleyview_crtc_enable
- i9xx_set_pll_dividers
- i9xx_crtc_enable
- i9xx_pfit_disable
- i9xx_crtc_disable
- intel_crtc_disable_noatomic
- intel_display_suspend
- intel_encoder_destroy
- intel_connector_verify_state
- pipe_required_fdi_lanes
- ironlake_check_fdi_lanes
- ironlake_fdi_compute_config
- hsw_crtc_state_ips_capable
- hsw_compute_ips_config
- intel_crtc_supports_double_wide
- ilk_pipe_pixel_rate
- intel_crtc_compute_pixel_rate
- intel_crtc_compute_config
- intel_reduce_m_n_ratio
- compute_m_n
- intel_link_compute_m_n
- intel_panel_use_ssc
- pnv_dpll_compute_fp
- i9xx_dpll_compute_fp
- i9xx_update_pll_dividers
- vlv_pllb_recal_opamp
- intel_pch_transcoder_set_m_n
- transcoder_has_m2_n2
- intel_cpu_transcoder_set_m_n
- intel_dp_set_m_n
- vlv_compute_dpll
- chv_compute_dpll
- vlv_prepare_pll
- chv_prepare_pll
- vlv_force_pll_on
- vlv_force_pll_off
- i9xx_compute_dpll
- i8xx_compute_dpll
- intel_set_pipe_timings
- intel_set_pipe_src_size
- intel_get_pipe_timings
- intel_get_pipe_src_size
- intel_mode_from_pipe_config
- i9xx_set_pipeconf
- i8xx_crtc_compute_clock
- g4x_crtc_compute_clock
- pnv_crtc_compute_clock
- i9xx_crtc_compute_clock
- chv_crtc_compute_clock
- vlv_crtc_compute_clock
- i9xx_has_pfit
- i9xx_get_pfit_config
- vlv_crtc_clock_get
- i9xx_get_initial_plane_config
- chv_crtc_clock_get
- intel_get_crtc_ycbcr_config
- i9xx_get_pipe_color_config
- i9xx_get_pipe_config
- ironlake_init_pch_refclk
- lpt_reset_fdi_mphy
- lpt_program_fdi_mphy
- lpt_enable_clkout_dp
- lpt_disable_clkout_dp
- lpt_bend_clkout_dp
- spll_uses_pch_ssc
- wrpll_uses_pch_ssc
- lpt_init_pch_refclk
- intel_init_pch_refclk
- ironlake_set_pipeconf
- haswell_set_pipeconf
- bdw_set_pipemisc
- bdw_get_pipemisc_bpp
- ironlake_get_lanes_required
- ironlake_needs_fb_cb_tune
- ironlake_compute_dpll
- ironlake_crtc_compute_clock
- intel_pch_transcoder_get_m_n
- intel_cpu_transcoder_get_m_n
- intel_dp_get_m_n
- ironlake_get_fdi_m_n_config
- skylake_get_pfit_config
- skylake_get_initial_plane_config
- ironlake_get_pfit_config
- ironlake_get_pipe_config
- haswell_crtc_compute_clock
- cannonlake_get_ddi_pll
- icelake_get_ddi_pll
- bxt_get_ddi_pll
- skylake_get_ddi_pll
- haswell_get_ddi_pll
- hsw_get_transcoder_state
- bxt_get_dsi_transcoder_state
- haswell_get_ddi_port_state
- haswell_get_pipe_config
- intel_cursor_base
- intel_cursor_position
- intel_cursor_size_ok
- intel_cursor_check_surface
- intel_check_cursor
- i845_cursor_max_stride
- i845_cursor_ctl_crtc
- i845_cursor_ctl
- i845_cursor_size_ok
- i845_check_cursor
- i845_update_cursor
- i845_disable_cursor
- i845_cursor_get_hw_state
- i9xx_cursor_max_stride
- i9xx_cursor_ctl_crtc
- i9xx_cursor_ctl
- i9xx_cursor_size_ok
- i9xx_check_cursor
- i9xx_update_cursor
- i9xx_disable_cursor
- i9xx_cursor_get_hw_state
- intel_framebuffer_create
- intel_modeset_disable_planes
- intel_get_load_detect_pipe
- intel_release_load_detect_pipe
- i9xx_pll_refclk
- i9xx_crtc_clock_get
- intel_dotclock_calculate
- ironlake_pch_clock_get
- intel_encoder_current_mode
- intel_crtc_destroy
- intel_wm_need_update
- needs_scaling
- intel_plane_atomic_calc_changes
- encoders_cloneable
- check_single_encoder_cloning
- icl_add_linked_planes
- icl_check_nv12_planes
- c8_planes_changed
- intel_crtc_atomic_check
- intel_modeset_update_connector_atomic_state
- compute_sink_pipe_bpp
- compute_baseline_pipe_bpp
- intel_dump_crtc_timings
- intel_dump_m_n_config
- intel_dump_infoframe
- snprintf_output_types
- output_formats
- intel_dump_plane_state
- intel_dump_pipe_config
- check_digital_port_conflicts
- clear_intel_crtc_state
- intel_modeset_pipe_config
- intel_fuzzy_clock_check
- intel_compare_m_n
- intel_compare_link_m_n
- intel_compare_infoframe
- pipe_config_infoframe_mismatch
- __printf
- fastboot_enabled
- intel_pipe_config_compare
- intel_pipe_config_sanity_check
- verify_wm_state
- verify_connector_state
- verify_encoder_state
- verify_crtc_state
- intel_verify_planes
- verify_single_dpll_state
- verify_shared_dpll_state
- intel_modeset_verify_crtc
- verify_disabled_dpll_state
- intel_modeset_verify_disabled
- update_scanline_offset
- intel_modeset_clear_plls
- haswell_mode_set_planes_workaround
- intel_lock_all_pipes
- intel_modeset_all_pipes
- intel_modeset_checks
- calc_watermark_data
- intel_crtc_check_fastset
- intel_atomic_check
- intel_atomic_prepare_commit
- intel_crtc_get_vblank_counter
- intel_update_crtc
- intel_update_crtcs
- skl_update_crtcs
- intel_atomic_helper_free_state
- intel_atomic_helper_free_state_worker
- intel_atomic_commit_fence_wait
- intel_atomic_cleanup_work
- intel_atomic_commit_tail
- intel_atomic_commit_work
- intel_atomic_commit_ready
- intel_atomic_track_fbs
- intel_atomic_commit
- do_rps_boost
- add_rps_boost_after_vblank
- intel_plane_pin_fb
- intel_plane_unpin_fb
- fb_obj_bump_render_priority
- intel_prepare_plane_fb
- intel_cleanup_plane_fb
- skl_max_scale
- intel_begin_crtc_commit
- intel_crtc_arm_fifo_underrun
- intel_finish_crtc_commit
- intel_plane_destroy
- i8xx_plane_format_mod_supported
- i965_plane_format_mod_supported
- intel_cursor_format_mod_supported
- intel_legacy_cursor_update
- i9xx_plane_has_fbc
- intel_primary_plane_create
- intel_cursor_plane_create
- intel_crtc_init_scalers
- intel_crtc_init
- intel_get_pipe_from_crtc_id_ioctl
- intel_encoder_clones
- ilk_has_edp_a
- intel_ddi_crt_present
- intel_pps_unlock_regs_wa
- intel_pps_init
- intel_setup_outputs
- intel_user_framebuffer_destroy
- intel_user_framebuffer_create_handle
- intel_user_framebuffer_dirty
- intel_framebuffer_init
- intel_user_framebuffer_create
- intel_atomic_state_free
- intel_mode_valid
- intel_init_display_hooks
- i915_vgacntrl_reg
- i915_disable_vga
- intel_modeset_init_hw
- sanitize_watermarks
- intel_update_fdi_pll_freq
- intel_initial_commit
- intel_modeset_init
- i830_enable_pipe
- i830_disable_pipe
- intel_sanitize_plane_mapping
- intel_crtc_has_encoders
- intel_encoder_find_connector
- has_pch_trancoder
- intel_sanitize_crtc
- has_bogus_dpll_config
- intel_sanitize_encoder
- i915_redisable_vga_power_on
- i915_redisable_vga
- readout_plane_state
- intel_modeset_readout_hw_state
- get_encoder_power_domains
- intel_early_display_was
- ibx_sanitize_pch_hdmi_port
- ibx_sanitize_pch_dp_port
- ibx_sanitize_pch_ports
- intel_modeset_setup_hw_state
- intel_display_resume
- intel_hpd_poll_fini
- intel_modeset_driver_remove
- intel_modeset_vga_set_state
- intel_display_capture_error_state
- intel_display_print_error_state
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27 #include <linux/i2c.h>
28 #include <linux/input.h>
29 #include <linux/intel-iommu.h>
30 #include <linux/kernel.h>
31 #include <linux/module.h>
32 #include <linux/dma-resv.h>
33 #include <linux/slab.h>
34 #include <linux/vgaarb.h>
35
36 #include <drm/drm_atomic.h>
37 #include <drm/drm_atomic_helper.h>
38 #include <drm/drm_atomic_uapi.h>
39 #include <drm/drm_dp_helper.h>
40 #include <drm/drm_edid.h>
41 #include <drm/drm_fourcc.h>
42 #include <drm/drm_plane_helper.h>
43 #include <drm/drm_probe_helper.h>
44 #include <drm/drm_rect.h>
45 #include <drm/i915_drm.h>
46
47 #include "display/intel_crt.h"
48 #include "display/intel_ddi.h"
49 #include "display/intel_dp.h"
50 #include "display/intel_dsi.h"
51 #include "display/intel_dvo.h"
52 #include "display/intel_gmbus.h"
53 #include "display/intel_hdmi.h"
54 #include "display/intel_lvds.h"
55 #include "display/intel_sdvo.h"
56 #include "display/intel_tv.h"
57 #include "display/intel_vdsc.h"
58
59 #include "i915_drv.h"
60 #include "i915_trace.h"
61 #include "intel_acpi.h"
62 #include "intel_atomic.h"
63 #include "intel_atomic_plane.h"
64 #include "intel_bw.h"
65 #include "intel_cdclk.h"
66 #include "intel_color.h"
67 #include "intel_display_types.h"
68 #include "intel_fbc.h"
69 #include "intel_fbdev.h"
70 #include "intel_fifo_underrun.h"
71 #include "intel_frontbuffer.h"
72 #include "intel_hdcp.h"
73 #include "intel_hotplug.h"
74 #include "intel_overlay.h"
75 #include "intel_pipe_crc.h"
76 #include "intel_pm.h"
77 #include "intel_psr.h"
78 #include "intel_quirks.h"
79 #include "intel_sideband.h"
80 #include "intel_sprite.h"
81 #include "intel_tc.h"
82
83
/* Primary plane pixel formats supported on the oldest (i8xx) platforms. */
static const u32 i8xx_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB1555,
	DRM_FORMAT_XRGB8888,
};

/* Primary plane pixel formats for the i965-class display engines. */
static const u32 i965_primary_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_XRGB2101010,
	DRM_FORMAT_XBGR2101010,
};

/* Framebuffer modifiers for i9xx primary planes; INVALID terminates the list. */
static const u64 i9xx_format_modifiers[] = {
	I915_FORMAT_MOD_X_TILED,
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};

/* Cursor planes only take pre-multiplied ARGB. */
static const u32 intel_cursor_formats[] = {
	DRM_FORMAT_ARGB8888,
};

/* Cursor framebuffers must be linear; INVALID terminates the list. */
static const u64 cursor_format_modifiers[] = {
	DRM_FORMAT_MOD_LINEAR,
	DRM_FORMAT_MOD_INVALID
};
116
117 static void i9xx_crtc_clock_get(struct intel_crtc *crtc,
118 struct intel_crtc_state *pipe_config);
119 static void ironlake_pch_clock_get(struct intel_crtc *crtc,
120 struct intel_crtc_state *pipe_config);
121
122 static int intel_framebuffer_init(struct intel_framebuffer *ifb,
123 struct drm_i915_gem_object *obj,
124 struct drm_mode_fb_cmd2 *mode_cmd);
125 static void intel_set_pipe_timings(const struct intel_crtc_state *crtc_state);
126 static void intel_set_pipe_src_size(const struct intel_crtc_state *crtc_state);
127 static void intel_cpu_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
128 const struct intel_link_m_n *m_n,
129 const struct intel_link_m_n *m2_n2);
130 static void i9xx_set_pipeconf(const struct intel_crtc_state *crtc_state);
131 static void ironlake_set_pipeconf(const struct intel_crtc_state *crtc_state);
132 static void haswell_set_pipeconf(const struct intel_crtc_state *crtc_state);
133 static void bdw_set_pipemisc(const struct intel_crtc_state *crtc_state);
134 static void vlv_prepare_pll(struct intel_crtc *crtc,
135 const struct intel_crtc_state *pipe_config);
136 static void chv_prepare_pll(struct intel_crtc *crtc,
137 const struct intel_crtc_state *pipe_config);
138 static void intel_begin_crtc_commit(struct intel_atomic_state *, struct intel_crtc *);
139 static void intel_finish_crtc_commit(struct intel_atomic_state *, struct intel_crtc *);
140 static void intel_crtc_init_scalers(struct intel_crtc *crtc,
141 struct intel_crtc_state *crtc_state);
142 static void skylake_pfit_enable(const struct intel_crtc_state *crtc_state);
143 static void ironlake_pfit_disable(const struct intel_crtc_state *old_crtc_state);
144 static void ironlake_pfit_enable(const struct intel_crtc_state *crtc_state);
145 static void intel_modeset_setup_hw_state(struct drm_device *dev,
146 struct drm_modeset_acquire_ctx *ctx);
147 static void intel_pre_disable_primary_noatomic(struct drm_crtc *crtc);
148
/*
 * Valid divider ranges for a display PLL.  Each member holds the inclusive
 * [min, max] interval a divider (or derived clock) must fall within; the
 * tables below instantiate one of these per platform/output combination.
 */
struct intel_limit {
	struct {
		int min, max;
	} dot, vco, n, m, m1, m2, p, p1;

	struct {
		/* dot clocks below dot_limit use p2_slow, above use p2_fast */
		int dot_limit;
		int p2_slow, p2_fast;
	} p2;
};
159
160
161 int vlv_get_hpll_vco(struct drm_i915_private *dev_priv)
162 {
163 int hpll_freq, vco_freq[] = { 800, 1600, 2000, 2400 };
164
165
166 hpll_freq = vlv_cck_read(dev_priv, CCK_FUSE_REG) &
167 CCK_FUSE_HPLL_FREQ_MASK;
168
169 return vco_freq[hpll_freq] * 1000;
170 }
171
172 int vlv_get_cck_clock(struct drm_i915_private *dev_priv,
173 const char *name, u32 reg, int ref_freq)
174 {
175 u32 val;
176 int divider;
177
178 val = vlv_cck_read(dev_priv, reg);
179 divider = val & CCK_FREQUENCY_VALUES;
180
181 WARN((val & CCK_FREQUENCY_STATUS) !=
182 (divider << CCK_FREQUENCY_STATUS_SHIFT),
183 "%s change in progress\n", name);
184
185 return DIV_ROUND_CLOSEST(ref_freq << 1, divider + 1);
186 }
187
188 int vlv_get_cck_clock_hpll(struct drm_i915_private *dev_priv,
189 const char *name, u32 reg)
190 {
191 int hpll;
192
193 vlv_cck_get(dev_priv);
194
195 if (dev_priv->hpll_freq == 0)
196 dev_priv->hpll_freq = vlv_get_hpll_vco(dev_priv);
197
198 hpll = vlv_get_cck_clock(dev_priv, name, reg, dev_priv->hpll_freq);
199
200 vlv_cck_put(dev_priv);
201
202 return hpll;
203 }
204
205 static void intel_update_czclk(struct drm_i915_private *dev_priv)
206 {
207 if (!(IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)))
208 return;
209
210 dev_priv->czclk_freq = vlv_get_cck_clock_hpll(dev_priv, "czclk",
211 CCK_CZ_CLOCK_CONTROL);
212
213 DRM_DEBUG_DRIVER("CZ clock rate: %d kHz\n", dev_priv->czclk_freq);
214 }
215
216 static inline u32
217 intel_fdi_link_freq(struct drm_i915_private *dev_priv,
218 const struct intel_crtc_state *pipe_config)
219 {
220 if (HAS_DDI(dev_priv))
221 return pipe_config->port_clock;
222 else
223 return dev_priv->fdi_pll_freq;
224 }
225
/*
 * PLL divider limits for the legacy platforms, one table per platform and
 * output type.  Consumed by the *_find_best_dpll() search routines below.
 */
static const struct intel_limit intel_limits_i8xx_dac = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 2, .max = 33 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 4, .p2_fast = 2 },
};

static const struct intel_limit intel_limits_i8xx_dvo = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 2, .max = 33 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 4, .p2_fast = 4 },
};

static const struct intel_limit intel_limits_i8xx_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 908000, .max = 1512000 },
	.n = { .min = 2, .max = 16 },
	.m = { .min = 96, .max = 140 },
	.m1 = { .min = 18, .max = 26 },
	.m2 = { .min = 6, .max = 16 },
	.p = { .min = 4, .max = 128 },
	.p1 = { .min = 1, .max = 6 },
	.p2 = { .dot_limit = 165000,
		.p2_slow = 14, .p2_fast = 7 },
};

static const struct intel_limit intel_limits_i9xx_sdvo = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1400000, .max = 2800000 },
	.n = { .min = 1, .max = 6 },
	.m = { .min = 70, .max = 120 },
	.m1 = { .min = 8, .max = 18 },
	.m2 = { .min = 3, .max = 7 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 200000,
		.p2_slow = 10, .p2_fast = 5 },
};

static const struct intel_limit intel_limits_i9xx_lvds = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1400000, .max = 2800000 },
	.n = { .min = 1, .max = 6 },
	.m = { .min = 70, .max = 120 },
	.m1 = { .min = 8, .max = 18 },
	.m2 = { .min = 3, .max = 7 },
	.p = { .min = 7, .max = 98 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 112000,
		.p2_slow = 14, .p2_fast = 7 },
};
290
291
/* G4x PLL divider limits. */
static const struct intel_limit intel_limits_g4x_sdvo = {
	.dot = { .min = 25000, .max = 270000 },
	.vco = { .min = 1750000, .max = 3500000},
	.n = { .min = 1, .max = 4 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 10, .max = 30 },
	.p1 = { .min = 1, .max = 3},
	.p2 = { .dot_limit = 270000,
		.p2_slow = 10,
		.p2_fast = 10
	},
};

static const struct intel_limit intel_limits_g4x_hdmi = {
	.dot = { .min = 22000, .max = 400000 },
	.vco = { .min = 1750000, .max = 3500000},
	.n = { .min = 1, .max = 4 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 16, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8},
	.p2 = { .dot_limit = 165000,
		.p2_slow = 10, .p2_fast = 5 },
};

static const struct intel_limit intel_limits_g4x_single_channel_lvds = {
	.dot = { .min = 20000, .max = 115000 },
	.vco = { .min = 1750000, .max = 3500000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	/* dot_limit of 0: p2 is effectively fixed (slow == fast) */
	.p2 = { .dot_limit = 0,
		.p2_slow = 14, .p2_fast = 14
	},
};

static const struct intel_limit intel_limits_g4x_dual_channel_lvds = {
	.dot = { .min = 80000, .max = 224000 },
	.vco = { .min = 1750000, .max = 3500000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 104, .max = 138 },
	.m1 = { .min = 17, .max = 23 },
	.m2 = { .min = 5, .max = 11 },
	.p = { .min = 14, .max = 42 },
	.p1 = { .min = 2, .max = 6 },
	/* dot_limit of 0: p2 is effectively fixed (slow == fast) */
	.p2 = { .dot_limit = 0,
		.p2_slow = 7, .p2_fast = 7
	},
};

static const struct intel_limit intel_limits_pineview_sdvo = {
	.dot = { .min = 20000, .max = 400000},
	.vco = { .min = 1700000, .max = 3500000 },
	.n = { .min = 3, .max = 6 },
	.m = { .min = 2, .max = 256 },
	/* m1 is unused on Pineview (range forced to 0) */
	.m1 = { .min = 0, .max = 0 },
	.m2 = { .min = 0, .max = 254 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 200000,
		.p2_slow = 10, .p2_fast = 5 },
};

static const struct intel_limit intel_limits_pineview_lvds = {
	.dot = { .min = 20000, .max = 400000 },
	.vco = { .min = 1700000, .max = 3500000 },
	.n = { .min = 3, .max = 6 },
	.m = { .min = 2, .max = 256 },
	/* m1 is unused on Pineview (range forced to 0) */
	.m1 = { .min = 0, .max = 0 },
	.m2 = { .min = 0, .max = 254 },
	.p = { .min = 7, .max = 112 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 112000,
		.p2_slow = 14, .p2_fast = 14 },
};
375
376
377
378
379
380
/* Ironlake PLL divider limits, per output type and LVDS channel/refclk mode. */
static const struct intel_limit intel_limits_ironlake_dac = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 5 },
	.m = { .min = 79, .max = 127 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 5, .max = 80 },
	.p1 = { .min = 1, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 10, .p2_fast = 5 },
};

static const struct intel_limit intel_limits_ironlake_single_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 118 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 14, .p2_fast = 14 },
};

static const struct intel_limit intel_limits_ironlake_dual_lvds = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 127 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 14, .max = 56 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 7, .p2_fast = 7 },
};

/* _100m variants: limits for a 100 MHz reference clock — TODO confirm */
static const struct intel_limit intel_limits_ironlake_single_lvds_100m = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 2 },
	.m = { .min = 79, .max = 126 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 28, .max = 112 },
	.p1 = { .min = 2, .max = 8 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 14, .p2_fast = 14 },
};

static const struct intel_limit intel_limits_ironlake_dual_lvds_100m = {
	.dot = { .min = 25000, .max = 350000 },
	.vco = { .min = 1760000, .max = 3510000 },
	.n = { .min = 1, .max = 3 },
	.m = { .min = 79, .max = 126 },
	.m1 = { .min = 12, .max = 22 },
	.m2 = { .min = 5, .max = 9 },
	.p = { .min = 14, .max = 42 },
	.p1 = { .min = 2, .max = 6 },
	.p2 = { .dot_limit = 225000,
		.p2_slow = 7, .p2_fast = 7 },
};
446
static const struct intel_limit intel_limits_vlv = {
	/*
	 * The dot limits are scaled by 5 because the fast clock runs at
	 * 5x the dot clock (see vlv_calc_dpll_params(), which divides
	 * the result by 5).
	 */
	.dot = { .min = 25000 * 5, .max = 270000 * 5 },
	.vco = { .min = 4000000, .max = 6000000 },
	.n = { .min = 1, .max = 7 },
	.m1 = { .min = 2, .max = 3 },
	.m2 = { .min = 11, .max = 156 },
	.p1 = { .min = 2, .max = 3 },
	.p2 = { .p2_slow = 2, .p2_fast = 20 },
};

static const struct intel_limit intel_limits_chv = {
	/*
	 * Same 5x scaling as VLV above; on CHV m2 is stored as a 22.22
	 * fixed-point value (hence the << 22 shifts), matching
	 * chv_calc_dpll_params().
	 */
	.dot = { .min = 25000 * 5, .max = 540000 * 5},
	.vco = { .min = 4800000, .max = 6480000 },
	.n = { .min = 1, .max = 1 },
	.m1 = { .min = 2, .max = 2 },
	.m2 = { .min = 24 << 22, .max = 175 << 22 },
	.p1 = { .min = 2, .max = 4 },
	.p2 = { .p2_slow = 1, .p2_fast = 14 },
};

static const struct intel_limit intel_limits_bxt = {
	/* No dot clock limit enforced for BXT. */
	.dot = { .min = 0, .max = INT_MAX },
	.vco = { .min = 4800000, .max = 6700000 },
	.n = { .min = 1, .max = 1 },
	.m1 = { .min = 2, .max = 2 },
	/* m2 in 22.22 fixed point, as on CHV */
	.m2 = { .min = 2 << 22, .max = 255 << 22 },
	.p1 = { .min = 2, .max = 4 },
	.p2 = { .p2_slow = 1, .p2_fast = 20 },
};
490
491
492 static void
493 skl_wa_827(struct drm_i915_private *dev_priv, int pipe, bool enable)
494 {
495 if (enable)
496 I915_WRITE(CLKGATE_DIS_PSL(pipe),
497 I915_READ(CLKGATE_DIS_PSL(pipe)) |
498 DUPS1_GATING_DIS | DUPS2_GATING_DIS);
499 else
500 I915_WRITE(CLKGATE_DIS_PSL(pipe),
501 I915_READ(CLKGATE_DIS_PSL(pipe)) &
502 ~(DUPS1_GATING_DIS | DUPS2_GATING_DIS));
503 }
504
505
506 static void
507 icl_wa_scalerclkgating(struct drm_i915_private *dev_priv, enum pipe pipe,
508 bool enable)
509 {
510 if (enable)
511 I915_WRITE(CLKGATE_DIS_PSL(pipe),
512 I915_READ(CLKGATE_DIS_PSL(pipe)) | DPFR_GATING_DIS);
513 else
514 I915_WRITE(CLKGATE_DIS_PSL(pipe),
515 I915_READ(CLKGATE_DIS_PSL(pipe)) & ~DPFR_GATING_DIS);
516 }
517
518 static bool
519 needs_modeset(const struct intel_crtc_state *state)
520 {
521 return drm_atomic_crtc_needs_modeset(&state->base);
522 }
523
524
525
526
527
528
529
530
531
532
533 static int pnv_calc_dpll_params(int refclk, struct dpll *clock)
534 {
535 clock->m = clock->m2 + 2;
536 clock->p = clock->p1 * clock->p2;
537 if (WARN_ON(clock->n == 0 || clock->p == 0))
538 return 0;
539 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n);
540 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
541
542 return clock->dot;
543 }
544
545 static u32 i9xx_dpll_compute_m(struct dpll *dpll)
546 {
547 return 5 * (dpll->m1 + 2) + (dpll->m2 + 2);
548 }
549
550 static int i9xx_calc_dpll_params(int refclk, struct dpll *clock)
551 {
552 clock->m = i9xx_dpll_compute_m(clock);
553 clock->p = clock->p1 * clock->p2;
554 if (WARN_ON(clock->n + 2 == 0 || clock->p == 0))
555 return 0;
556 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n + 2);
557 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
558
559 return clock->dot;
560 }
561
562 static int vlv_calc_dpll_params(int refclk, struct dpll *clock)
563 {
564 clock->m = clock->m1 * clock->m2;
565 clock->p = clock->p1 * clock->p2;
566 if (WARN_ON(clock->n == 0 || clock->p == 0))
567 return 0;
568 clock->vco = DIV_ROUND_CLOSEST(refclk * clock->m, clock->n);
569 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
570
571 return clock->dot / 5;
572 }
573
574 int chv_calc_dpll_params(int refclk, struct dpll *clock)
575 {
576 clock->m = clock->m1 * clock->m2;
577 clock->p = clock->p1 * clock->p2;
578 if (WARN_ON(clock->n == 0 || clock->p == 0))
579 return 0;
580 clock->vco = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(refclk, clock->m),
581 clock->n << 22);
582 clock->dot = DIV_ROUND_CLOSEST(clock->vco, clock->p);
583
584 return clock->dot / 5;
585 }
586
587 #define INTELPllInvalid(s) do { return false; } while (0)
588
589
590
591
592
/*
 * Returns whether the given set of divisors (and the clocks derived from
 * them in *clock) fall within the platform limits in *limit.  Note that
 * INTELPllInvalid() returns false directly from this function.
 */
static bool intel_PLL_is_valid(struct drm_i915_private *dev_priv,
			       const struct intel_limit *limit,
			       const struct dpll *clock)
{
	if (clock->n < limit->n.min || limit->n.max < clock->n)
		INTELPllInvalid("n out of range\n");
	if (clock->p1 < limit->p1.min || limit->p1.max < clock->p1)
		INTELPllInvalid("p1 out of range\n");
	if (clock->m2 < limit->m2.min || limit->m2.max < clock->m2)
		INTELPllInvalid("m2 out of range\n");
	if (clock->m1 < limit->m1.min || limit->m1.max < clock->m1)
		INTELPllInvalid("m1 out of range\n");

	/* m1 > m2 is required except on PNV/VLV/CHV/BXT-class hardware. */
	if (!IS_PINEVIEW(dev_priv) && !IS_VALLEYVIEW(dev_priv) &&
	    !IS_CHERRYVIEW(dev_priv) && !IS_GEN9_LP(dev_priv))
		if (clock->m1 <= clock->m2)
			INTELPllInvalid("m1 <= m2\n");

	/* The combined m and p limits are not used on VLV/CHV/BXT-class hw. */
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv) &&
	    !IS_GEN9_LP(dev_priv)) {
		if (clock->p < limit->p.min || limit->p.max < clock->p)
			INTELPllInvalid("p out of range\n");
		if (clock->m < limit->m.min || limit->m.max < clock->m)
			INTELPllInvalid("m out of range\n");
	}

	if (clock->vco < limit->vco.min || limit->vco.max < clock->vco)
		INTELPllInvalid("vco out of range\n");

	if (clock->dot < limit->dot.min || limit->dot.max < clock->dot)
		INTELPllInvalid("dot out of range\n");

	return true;
}
629
630 static int
631 i9xx_select_p2_div(const struct intel_limit *limit,
632 const struct intel_crtc_state *crtc_state,
633 int target)
634 {
635 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
636
637 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
638
639
640
641
642
643 if (intel_is_dual_link_lvds(dev_priv))
644 return limit->p2.p2_fast;
645 else
646 return limit->p2.p2_slow;
647 } else {
648 if (target < limit->p2.dot_limit)
649 return limit->p2.p2_slow;
650 else
651 return limit->p2.p2_fast;
652 }
653 }
654
655
656
657
658
659
660
661
662
663
664
/*
 * Exhaustively search the i9xx divider space for the set whose resulting
 * dot clock is closest to @target with the given @refclk, storing the
 * winner in *best_clock.  If @match_clock is non-NULL, only candidates
 * with the same post divider p are considered.  Returns true if any valid
 * candidate improved on the initial error bound (the target itself).
 */
static bool
i9xx_find_best_dpll(const struct intel_limit *limit,
		    struct intel_crtc_state *crtc_state,
		    int target, int refclk, struct dpll *match_clock,
		    struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->base.crtc->dev;
	struct dpll clock;
	int err = target;	/* best error so far; start at the worst case */

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max;
	     clock.m1++) {
		for (clock.m2 = limit->m2.min;
		     clock.m2 <= limit->m2.max; clock.m2++) {
			/* i9xx requires m1 > m2; larger m2 values can't match */
			if (clock.m2 >= clock.m1)
				break;
			for (clock.n = limit->n.min;
			     clock.n <= limit->n.max; clock.n++) {
				for (clock.p1 = limit->p1.min;
				     clock.p1 <= limit->p1.max; clock.p1++) {
					int this_err;

					i9xx_calc_dpll_params(refclk, &clock);
					if (!intel_PLL_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;
					if (match_clock &&
					    clock.p != match_clock->p)
						continue;

					this_err = abs(clock.dot - target);
					if (this_err < err) {
						*best_clock = clock;
						err = this_err;
					}
				}
			}
		}
	}

	return (err != target);
}
712
713
714
715
716
717
718
719
720
721
722
/*
 * Pineview variant of i9xx_find_best_dpll(): same exhaustive search, but
 * uses pnv_calc_dpll_params() (m = m2 + 2, no m1) and therefore drops the
 * m1 > m2 constraint.  Stores the closest candidate in *best_clock and
 * returns true if any valid candidate was found.
 */
static bool
pnv_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->base.crtc->dev;
	struct dpll clock;
	int err = target;	/* best error so far; start at the worst case */

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max;
	     clock.m1++) {
		for (clock.m2 = limit->m2.min;
		     clock.m2 <= limit->m2.max; clock.m2++) {
			for (clock.n = limit->n.min;
			     clock.n <= limit->n.max; clock.n++) {
				for (clock.p1 = limit->p1.min;
				     clock.p1 <= limit->p1.max; clock.p1++) {
					int this_err;

					pnv_calc_dpll_params(refclk, &clock);
					if (!intel_PLL_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;
					if (match_clock &&
					    clock.p != match_clock->p)
						continue;

					this_err = abs(clock.dot - target);
					if (this_err < err) {
						*best_clock = clock;
						err = this_err;
					}
				}
			}
		}
	}

	return (err != target);
}
768
769
770
771
772
773
774
775
776
777
778
/*
 * G4x DPLL search: instead of minimizing absolute error over the whole
 * space, accept any divisor set within ~0.58% of @target and then keep
 * tightening. Iterates n upward (smaller n preferred for precision) and
 * m1/m2/p1 downward; once a hit is found, max_n is clamped so no larger
 * n is ever chosen.
 *
 * Returns true and fills @best_clock if a divisor set within the error
 * budget was found. @match_clock is unused on this platform.
 */
static bool
g4x_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct drm_device *dev = crtc_state->base.crtc->dev;
	struct dpll clock;
	int max_n;
	bool found = false;
	/* approximately equals target * 0.00585 (1/256 + 1/512) */
	int err_most = (target >> 8) + (target >> 9);

	memset(best_clock, 0, sizeof(*best_clock));

	clock.p2 = i9xx_select_p2_div(limit, crtc_state, target);

	max_n = limit->n.max;
	/* based on hardware requirement, prefer smaller n to precision */
	for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) {
		/* based on hardware requirement, prefer larger m1,m2 */
		for (clock.m1 = limit->m1.max;
		     clock.m1 >= limit->m1.min; clock.m1--) {
			for (clock.m2 = limit->m2.max;
			     clock.m2 >= limit->m2.min; clock.m2--) {
				for (clock.p1 = limit->p1.max;
				     clock.p1 >= limit->p1.min; clock.p1--) {
					int this_err;

					i9xx_calc_dpll_params(refclk, &clock);
					if (!intel_PLL_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;

					this_err = abs(clock.dot - target);
					if (this_err < err_most) {
						*best_clock = clock;
						err_most = this_err;
						/* never pick a larger n again */
						max_n = clock.n;
						found = true;
					}
				}
			}
		}
	}
	return found;
}
827
828
829
830
831
/*
 * Check whether @calculated_clock beats the best configuration found so
 * far (@best_clock / @best_error_ppm). The candidate's error in ppm is
 * returned through @error_ppm.
 *
 * Returns true if the candidate should replace the current best.
 */
static bool vlv_PLL_is_optimal(struct drm_device *dev, int target_freq,
			       const struct dpll *calculated_clock,
			       const struct dpll *best_clock,
			       unsigned int best_error_ppm,
			       unsigned int *error_ppm)
{
	/*
	 * CHV ignores the error entirely and only prefers a larger total
	 * P divider (the CHV algorithm computes m2 exactly, so the dot
	 * clock error is not the deciding factor there).
	 */
	if (IS_CHERRYVIEW(to_i915(dev))) {
		*error_ppm = 0;

		return calculated_clock->p > best_clock->p;
	}

	if (WARN_ON_ONCE(!target_freq))
		return false;

	*error_ppm = div_u64(1000000ULL *
				abs(target_freq - calculated_clock->dot),
			     target_freq);
	/*
	 * A bigger P always wins as long as the error stays tiny
	 * (< 100 ppm); report 0 so later candidates can't displace it
	 * on error alone.
	 */
	if (*error_ppm < 100 && calculated_clock->p > best_clock->p) {
		*error_ppm = 0;

		return true;
	}

	/* otherwise require a clear (>= 10 ppm) improvement */
	return *error_ppm + 10 < best_error_ppm;
}
867
868
869
870
871
872
/*
 * Valleyview DPLL search. Iterates n/p1/p2 and m1, computing the exact
 * m2 for each combination, and keeps the candidate that
 * vlv_PLL_is_optimal() judges best (lowest ppm error, bigger P on ties).
 *
 * Returns true and fills @best_clock on success. @match_clock is unused.
 */
static bool
vlv_find_best_dpll(const struct intel_limit *limit,
		   struct intel_crtc_state *crtc_state,
		   int target, int refclk, struct dpll *match_clock,
		   struct dpll *best_clock)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct dpll clock;
	unsigned int bestppm = 1000000;
	/* min update 19.2 MHz */
	int max_n = min(limit->n.max, refclk / 19200);
	bool found = false;

	target *= 5; /* fast clock */

	memset(best_clock, 0, sizeof(*best_clock));

	/* based on hardware requirement, prefer smaller n to precision */
	for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) {
		for (clock.p1 = limit->p1.max; clock.p1 >= limit->p1.min; clock.p1--) {
			for (clock.p2 = limit->p2.p2_fast; clock.p2 >= limit->p2.p2_slow;
			     clock.p2 -= clock.p2 > 10 ? 2 : 1) {
				clock.p = clock.p1 * clock.p2;
				/* based on hardware requirement, prefer bigger m1,m2 values */
				for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max; clock.m1++) {
					unsigned int ppm;

					/* solve m2 from the PLL equation for this combination */
					clock.m2 = DIV_ROUND_CLOSEST(target * clock.p * clock.n,
								     refclk * clock.m1);

					vlv_calc_dpll_params(refclk, &clock);

					if (!intel_PLL_is_valid(to_i915(dev),
								limit,
								&clock))
						continue;

					if (!vlv_PLL_is_optimal(dev, target,
								&clock,
								best_clock,
								bestppm, &ppm))
						continue;

					*best_clock = clock;
					bestppm = ppm;
					found = true;
				}
			}
		}
	}

	return found;
}
927
928
929
930
931
932
933 static bool
934 chv_find_best_dpll(const struct intel_limit *limit,
935 struct intel_crtc_state *crtc_state,
936 int target, int refclk, struct dpll *match_clock,
937 struct dpll *best_clock)
938 {
939 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
940 struct drm_device *dev = crtc->base.dev;
941 unsigned int best_error_ppm;
942 struct dpll clock;
943 u64 m2;
944 int found = false;
945
946 memset(best_clock, 0, sizeof(*best_clock));
947 best_error_ppm = 1000000;
948
949
950
951
952
953
954 clock.n = 1, clock.m1 = 2;
955 target *= 5;
956
957 for (clock.p1 = limit->p1.max; clock.p1 >= limit->p1.min; clock.p1--) {
958 for (clock.p2 = limit->p2.p2_fast;
959 clock.p2 >= limit->p2.p2_slow;
960 clock.p2 -= clock.p2 > 10 ? 2 : 1) {
961 unsigned int error_ppm;
962
963 clock.p = clock.p1 * clock.p2;
964
965 m2 = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(target, clock.p * clock.n) << 22,
966 refclk * clock.m1);
967
968 if (m2 > INT_MAX/clock.m1)
969 continue;
970
971 clock.m2 = m2;
972
973 chv_calc_dpll_params(refclk, &clock);
974
975 if (!intel_PLL_is_valid(to_i915(dev), limit, &clock))
976 continue;
977
978 if (!vlv_PLL_is_optimal(dev, target, &clock, best_clock,
979 best_error_ppm, &error_ppm))
980 continue;
981
982 *best_clock = clock;
983 best_error_ppm = error_ppm;
984 found = true;
985 }
986 }
987
988 return found;
989 }
990
991 bool bxt_find_best_dpll(struct intel_crtc_state *crtc_state,
992 struct dpll *best_clock)
993 {
994 int refclk = 100000;
995 const struct intel_limit *limit = &intel_limits_bxt;
996
997 return chv_find_best_dpll(limit, crtc_state,
998 crtc_state->port_clock, refclk,
999 NULL, best_clock);
1000 }
1001
1002 bool intel_crtc_active(struct intel_crtc *crtc)
1003 {
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017 return crtc->active && crtc->base.primary->state->fb &&
1018 crtc->config->base.adjusted_mode.crtc_clock;
1019 }
1020
/* Look up the CPU transcoder currently driving the given pipe. */
enum transcoder intel_pipe_to_cpu_transcoder(struct drm_i915_private *dev_priv,
					     enum pipe pipe)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);

	return crtc->config->cpu_transcoder;
}
1028
1029 static bool pipe_scanline_is_moving(struct drm_i915_private *dev_priv,
1030 enum pipe pipe)
1031 {
1032 i915_reg_t reg = PIPEDSL(pipe);
1033 u32 line1, line2;
1034 u32 line_mask;
1035
1036 if (IS_GEN(dev_priv, 2))
1037 line_mask = DSL_LINEMASK_GEN2;
1038 else
1039 line_mask = DSL_LINEMASK_GEN3;
1040
1041 line1 = I915_READ(reg) & line_mask;
1042 msleep(5);
1043 line2 = I915_READ(reg) & line_mask;
1044
1045 return line1 != line2;
1046 }
1047
/*
 * Wait (up to 100 ms) for the pipe's scanline counter to reach the
 * requested state: moving (@state == true) or stopped (false).
 */
static void wait_for_pipe_scanline_moving(struct intel_crtc *crtc, bool state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (wait_for(pipe_scanline_is_moving(dev_priv, pipe) == state, 100))
		DRM_ERROR("pipe %c scanline %s wait timed out\n",
			  pipe_name(pipe), onoff(state));
}
1058
/* Wait until the pipe's scanline counter has stopped advancing. */
static void intel_wait_for_pipe_scanline_stopped(struct intel_crtc *crtc)
{
	wait_for_pipe_scanline_moving(crtc, false);
}
1063
/* Wait until the pipe's scanline counter starts advancing. */
static void intel_wait_for_pipe_scanline_moving(struct intel_crtc *crtc)
{
	wait_for_pipe_scanline_moving(crtc, true);
}
1068
/*
 * Wait for the pipe to actually turn off after PIPECONF_ENABLE has been
 * cleared. Gen4+ has a status bit for this; older parts are detected by
 * watching the scanline counter stop.
 */
static void
intel_wait_for_pipe_off(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (INTEL_GEN(dev_priv) >= 4) {
		enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
		i915_reg_t reg = PIPECONF(cpu_transcoder);

		/* Wait for the Pipe State to go off */
		if (intel_de_wait_for_clear(dev_priv, reg,
					    I965_PIPECONF_ACTIVE, 100))
			WARN(1, "pipe_off wait timed out\n");
	} else {
		/* Wait for the display line to settle/stop moving */
		intel_wait_for_pipe_scanline_stopped(crtc);
	}
}
1087
1088
/* Assert that the pipe's DPLL VCO enable bit matches the expected state. */
void assert_pll(struct drm_i915_private *dev_priv,
		enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = I915_READ(DPLL(pipe));
	cur_state = !!(val & DPLL_VCO_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1101
1102
/* Assert DSI PLL state, read via the CCK sideband (VLV/CHV). */
void assert_dsi_pll(struct drm_i915_private *dev_priv, bool state)
{
	u32 val;
	bool cur_state;

	vlv_cck_get(dev_priv);
	val = vlv_cck_read(dev_priv, CCK_REG_DSI_PLL_CONTROL);
	vlv_cck_put(dev_priv);

	cur_state = val & DSI_PLL_VCO_EN;
	I915_STATE_WARN(cur_state != state,
	     "DSI PLL state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1117
/* Assert the FDI TX enable state for the given pipe. */
static void assert_fdi_tx(struct drm_i915_private *dev_priv,
			  enum pipe pipe, bool state)
{
	bool cur_state;
	enum transcoder cpu_transcoder = intel_pipe_to_cpu_transcoder(dev_priv,
								      pipe);

	if (HAS_DDI(dev_priv)) {
		/* DDI does not have a specific FDI_TX register; check the
		 * transcoder's DDI function enable instead. */
		u32 val = I915_READ(TRANS_DDI_FUNC_CTL(cpu_transcoder));
		cur_state = !!(val & TRANS_DDI_FUNC_ENABLE);
	} else {
		u32 val = I915_READ(FDI_TX_CTL(pipe));
		cur_state = !!(val & FDI_TX_ENABLE);
	}
	I915_STATE_WARN(cur_state != state,
	     "FDI TX state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_fdi_tx_enabled(d, p) assert_fdi_tx(d, p, true)
#define assert_fdi_tx_disabled(d, p) assert_fdi_tx(d, p, false)
1139
/* Assert the FDI RX enable state for the given pipe. */
static void assert_fdi_rx(struct drm_i915_private *dev_priv,
			  enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = I915_READ(FDI_RX_CTL(pipe));
	cur_state = !!(val & FDI_RX_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "FDI RX state assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
#define assert_fdi_rx_enabled(d, p) assert_fdi_rx(d, p, true)
#define assert_fdi_rx_disabled(d, p) assert_fdi_rx(d, p, false)
1154
/* Assert that the FDI TX PLL is enabled, where that is software-visible. */
static void assert_fdi_tx_pll_enabled(struct drm_i915_private *dev_priv,
				      enum pipe pipe)
{
	u32 val;

	/* ILK FDI PLL is always enabled */
	if (IS_GEN(dev_priv, 5))
		return;

	/* On Haswell, DDI ports are responsible for the FDI PLL setup */
	if (HAS_DDI(dev_priv))
		return;

	val = I915_READ(FDI_TX_CTL(pipe));
	I915_STATE_WARN(!(val & FDI_TX_PLL_ENABLE), "FDI TX PLL assertion failure, should be active but is disabled\n");
}
1171
/* Assert the FDI RX PLL enable bit matches the expected state. */
void assert_fdi_rx_pll(struct drm_i915_private *dev_priv,
		       enum pipe pipe, bool state)
{
	u32 val;
	bool cur_state;

	val = I915_READ(FDI_RX_CTL(pipe));
	cur_state = !!(val & FDI_RX_PLL_ENABLE);
	I915_STATE_WARN(cur_state != state,
	     "FDI RX PLL assertion failure (expected %s, current %s)\n",
			onoff(state), onoff(cur_state));
}
1184
/*
 * Assert that the panel power sequencer registers for the panel on
 * @pipe are writable (panel off, or register-unlock key set). The PPS
 * block location and the pipe the panel is on depend on the platform.
 */
void assert_panel_unlocked(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	i915_reg_t pp_reg;
	u32 val;
	enum pipe panel_pipe = INVALID_PIPE;
	bool locked = true;

	/* DDI platforms handle panel power via other means */
	if (WARN_ON(HAS_DDI(dev_priv)))
		return;

	if (HAS_PCH_SPLIT(dev_priv)) {
		u32 port_sel;

		pp_reg = PP_CONTROL(0);
		port_sel = I915_READ(PP_ON_DELAYS(0)) & PANEL_PORT_SELECT_MASK;

		/* determine which pipe the panel's port is driving */
		switch (port_sel) {
		case PANEL_PORT_SELECT_LVDS:
			intel_lvds_port_enabled(dev_priv, PCH_LVDS, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPA:
			intel_dp_port_enabled(dev_priv, DP_A, PORT_A, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPC:
			intel_dp_port_enabled(dev_priv, PCH_DP_C, PORT_C, &panel_pipe);
			break;
		case PANEL_PORT_SELECT_DPD:
			intel_dp_port_enabled(dev_priv, PCH_DP_D, PORT_D, &panel_pipe);
			break;
		default:
			MISSING_CASE(port_sel);
			break;
		}
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		/* VLV/CHV have a PPS instance per pipe */
		pp_reg = PP_CONTROL(pipe);
		panel_pipe = pipe;
	} else {
		u32 port_sel;

		pp_reg = PP_CONTROL(0);
		port_sel = I915_READ(PP_ON_DELAYS(0)) & PANEL_PORT_SELECT_MASK;

		/* pre-PCH platforms only support LVDS panels here */
		WARN_ON(port_sel != PANEL_PORT_SELECT_LVDS);
		intel_lvds_port_enabled(dev_priv, LVDS, &panel_pipe);
	}

	val = I915_READ(pp_reg);
	/* unlocked == panel power off, or the unlock key is programmed */
	if (!(val & PANEL_POWER_ON) ||
	    ((val & PANEL_UNLOCK_MASK) == PANEL_UNLOCK_REGS))
		locked = false;

	I915_STATE_WARN(panel_pipe == pipe && locked,
	     "panel assertion failure, pipe %c regs locked\n",
	     pipe_name(pipe));
}
1241
/* Assert that the pipe's enable bit matches the expected state. */
void assert_pipe(struct drm_i915_private *dev_priv,
		 enum pipe pipe, bool state)
{
	bool cur_state;
	enum transcoder cpu_transcoder = intel_pipe_to_cpu_transcoder(dev_priv,
								      pipe);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;

	/* we keep both pipes enabled on 830 */
	if (IS_I830(dev_priv))
		state = true;

	power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (wakeref) {
		u32 val = I915_READ(PIPECONF(cpu_transcoder));
		cur_state = !!(val & PIPECONF_ENABLE);

		intel_display_power_put(dev_priv, power_domain, wakeref);
	} else {
		/* power domain off implies the pipe is off */
		cur_state = false;
	}

	I915_STATE_WARN(cur_state != state,
	     "pipe %c assertion failure (expected %s, current %s)\n",
			pipe_name(pipe), onoff(state), onoff(cur_state));
}
1270
/* Assert a plane's hardware enable state via its get_hw_state() hook. */
static void assert_plane(struct intel_plane *plane, bool state)
{
	enum pipe pipe;
	bool cur_state;

	cur_state = plane->get_hw_state(plane, &pipe);

	I915_STATE_WARN(cur_state != state,
			"%s assertion failure (expected %s, current %s)\n",
			plane->base.name, onoff(state), onoff(cur_state));
}

#define assert_plane_enabled(p) assert_plane(p, true)
#define assert_plane_disabled(p) assert_plane(p, false)
1285
/* Assert that every plane attached to the CRTC is disabled in hardware. */
static void assert_planes_disabled(struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_plane *plane;

	for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
		assert_plane_disabled(plane);
}
1294
/*
 * Assert vblank interrupts are off: drm_crtc_vblank_get() succeeding
 * (returning 0) means vblanks were enabled, which triggers the warning;
 * the reference is dropped again to balance the get.
 */
static void assert_vblank_disabled(struct drm_crtc *crtc)
{
	if (I915_STATE_WARN_ON(drm_crtc_vblank_get(crtc) == 0))
		drm_crtc_vblank_put(crtc);
}
1300
/* Assert the PCH transcoder for @pipe is not enabled. */
void assert_pch_transcoder_disabled(struct drm_i915_private *dev_priv,
				    enum pipe pipe)
{
	u32 val;
	bool enabled;

	val = I915_READ(PCH_TRANSCONF(pipe));
	enabled = !!(val & TRANS_ENABLE);
	I915_STATE_WARN(enabled,
	     "transcoder assertion failed, should be off on pipe %c but is still active\n",
	     pipe_name(pipe));
}
1313
/*
 * Assert the PCH DP port is not driving @pipe, and (on IBX) that a
 * disabled port is not still parked on transcoder B.
 */
static void assert_pch_dp_disabled(struct drm_i915_private *dev_priv,
				   enum pipe pipe, enum port port,
				   i915_reg_t dp_reg)
{
	enum pipe port_pipe;
	bool state;

	state = intel_dp_port_enabled(dev_priv, dp_reg, port, &port_pipe);

	I915_STATE_WARN(state && port_pipe == pipe,
			"PCH DP %c enabled on transcoder %c, should be disabled\n",
			port_name(port), pipe_name(pipe));

	I915_STATE_WARN(HAS_PCH_IBX(dev_priv) && !state && port_pipe == PIPE_B,
			"IBX PCH DP %c still using transcoder B\n",
			port_name(port));
}
1331
/*
 * Assert the PCH HDMI/SDVO port is not driving @pipe, and (on IBX) that
 * a disabled port is not still parked on transcoder B.
 */
static void assert_pch_hdmi_disabled(struct drm_i915_private *dev_priv,
				     enum pipe pipe, enum port port,
				     i915_reg_t hdmi_reg)
{
	enum pipe port_pipe;
	bool state;

	state = intel_sdvo_port_enabled(dev_priv, hdmi_reg, &port_pipe);

	I915_STATE_WARN(state && port_pipe == pipe,
			"PCH HDMI %c enabled on transcoder %c, should be disabled\n",
			port_name(port), pipe_name(pipe));

	I915_STATE_WARN(HAS_PCH_IBX(dev_priv) && !state && port_pipe == PIPE_B,
			"IBX PCH HDMI %c still using transcoder B\n",
			port_name(port));
}
1349
/* Assert no PCH port (DP, VGA, LVDS, HDMI) is still driving @pipe. */
static void assert_pch_ports_disabled(struct drm_i915_private *dev_priv,
				      enum pipe pipe)
{
	enum pipe port_pipe;

	assert_pch_dp_disabled(dev_priv, pipe, PORT_B, PCH_DP_B);
	assert_pch_dp_disabled(dev_priv, pipe, PORT_C, PCH_DP_C);
	assert_pch_dp_disabled(dev_priv, pipe, PORT_D, PCH_DP_D);

	I915_STATE_WARN(intel_crt_port_enabled(dev_priv, PCH_ADPA, &port_pipe) &&
			port_pipe == pipe,
			"PCH VGA enabled on transcoder %c, should be disabled\n",
			pipe_name(pipe));

	I915_STATE_WARN(intel_lvds_port_enabled(dev_priv, PCH_LVDS, &port_pipe) &&
			port_pipe == pipe,
			"PCH LVDS enabled on transcoder %c, should be disabled\n",
			pipe_name(pipe));

	/* PCH SDVOB multiplexes with HDMIB */
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_B, PCH_HDMIB);
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_C, PCH_HDMIC);
	assert_pch_hdmi_disabled(dev_priv, pipe, PORT_D, PCH_HDMID);
}
1374
/* Write the DPLL value, let the clocks stabilize, then poll for lock. */
static void _vlv_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	I915_WRITE(DPLL(pipe), pipe_config->dpll_hw_state.dpll);
	POSTING_READ(DPLL(pipe));
	udelay(150);

	if (intel_de_wait_for_set(dev_priv, DPLL(pipe), DPLL_LOCK_VLV, 1))
		DRM_ERROR("DPLL %d failed to lock\n", pipe);
}
1388
/* Enable the Valleyview DPLL for a pipe (must be called with pipe off). */
static void vlv_enable_pll(struct intel_crtc *crtc,
			   const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	assert_pipe_disabled(dev_priv, pipe);

	/* PLL is protected by panel, make sure we can write it */
	assert_panel_unlocked(dev_priv, pipe);

	/* only light up the VCO when the config actually uses it
	 * (presumably DSI configs leave it off — confirm against caller) */
	if (pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE)
		_vlv_enable_pll(crtc, pipe_config);

	I915_WRITE(DPLL_MD(pipe), pipe_config->dpll_hw_state.dpll_md);
	POSTING_READ(DPLL_MD(pipe));
}
1406
1407
/* Enable the CHV DPLL: switch on the display clock via DPIO, then the PLL. */
static void _chv_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 tmp;

	vlv_dpio_get(dev_priv);

	/* Enable back the 10bit clock to display controller */
	tmp = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port));
	tmp |= DPIO_DCLKP_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port), tmp);

	vlv_dpio_put(dev_priv);

	/*
	 * Need to wait > 100ns between dclkp clock enable bit and PLL enable.
	 */
	udelay(1);

	/* Enable PLL */
	I915_WRITE(DPLL(pipe), pipe_config->dpll_hw_state.dpll);

	/* Check PLL is locked */
	if (intel_de_wait_for_set(dev_priv, DPLL(pipe), DPLL_LOCK_VLV, 1))
		DRM_ERROR("PLL %d failed to lock\n", pipe);
}
1437
/* Enable the Cherryview DPLL for a pipe (must be called with pipe off). */
static void chv_enable_pll(struct intel_crtc *crtc,
			   const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	assert_pipe_disabled(dev_priv, pipe);

	/* PLL is protected by panel, make sure we can write it */
	assert_panel_unlocked(dev_priv, pipe);

	if (pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE)
		_chv_enable_pll(crtc, pipe_config);

	if (pipe != PIPE_A) {
		/*
		 * WaPixelRepeatModeFixForC0:chv
		 *
		 * DPLLCMD is AWOL. Use chicken bits to propagate
		 * the value from DPLLBMD to either pipe B or C.
		 */
		I915_WRITE(CBR4_VLV, CBR_DPLLBMD_PIPE(pipe));
		I915_WRITE(DPLL_MD(PIPE_B), pipe_config->dpll_hw_state.dpll_md);
		I915_WRITE(CBR4_VLV, 0);
		dev_priv->chv_dpll_md[pipe] = pipe_config->dpll_hw_state.dpll_md;

		/*
		 * DPLLB VGA mode also seems to cause problems.
		 * We should always have it disabled.
		 */
		WARN_ON((I915_READ(DPLL(PIPE_B)) & DPLL_VGA_MODE_DIS) == 0);
	} else {
		I915_WRITE(DPLL_MD(pipe), pipe_config->dpll_hw_state.dpll_md);
		POSTING_READ(DPLL_MD(pipe));
	}
}
1474
1475 static bool i9xx_has_pps(struct drm_i915_private *dev_priv)
1476 {
1477 if (IS_I830(dev_priv))
1478 return false;
1479
1480 return IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv);
1481 }
1482
/* Enable the i9xx-era DPLL for a pipe (must be called with pipe off). */
static void i9xx_enable_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	i915_reg_t reg = DPLL(crtc->pipe);
	u32 dpll = crtc_state->dpll_hw_state.dpll;
	int i;

	assert_pipe_disabled(dev_priv, crtc->pipe);

	/* PLL is protected by panel, make sure we can write it */
	if (i9xx_has_pps(dev_priv))
		assert_panel_unlocked(dev_priv, crtc->pipe);

	/*
	 * Apparently we need to have VGA mode enabled prior to changing
	 * the P1/P2 dividers. Otherwise the DPLL will keep using the old
	 * dividers, even though the register value does change.
	 */
	I915_WRITE(reg, dpll & ~DPLL_VGA_MODE_DIS);
	I915_WRITE(reg, dpll);

	/* Wait for the clocks to stabilize. */
	POSTING_READ(reg);
	udelay(150);

	if (INTEL_GEN(dev_priv) >= 4) {
		I915_WRITE(DPLL_MD(crtc->pipe),
			   crtc_state->dpll_hw_state.dpll_md);
	} else {
		/*
		 * The pixel multiplier can only be updated once the
		 * DPLL is enabled and the clocks are stable.
		 *
		 * So write it again.
		 */
		I915_WRITE(reg, dpll);
	}

	/* We do this three times for luck */
	for (i = 0; i < 3; i++) {
		I915_WRITE(reg, dpll);
		POSTING_READ(reg);
		udelay(150); /* wait for warmup */
	}
}
1528
/* Disable the i9xx-era DPLL, leaving VGA mode disabled. */
static void i9xx_disable_pll(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/* Don't disable pipe or pipe PLLs if needed */
	if (IS_I830(dev_priv))
		return;

	/* Make sure the pipe isn't still relying on us */
	assert_pipe_disabled(dev_priv, pipe);

	I915_WRITE(DPLL(pipe), DPLL_VGA_MODE_DIS);
	POSTING_READ(DPLL(pipe));
}
1545
/* Disable the VLV DPLL, keeping the reference clock (and CRI clock on B/C) on. */
static void vlv_disable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	u32 val;

	/* Make sure the pipe isn't still relying on us */
	assert_pipe_disabled(dev_priv, pipe);

	val = DPLL_INTEGRATED_REF_CLK_VLV |
		DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
	/* the CRI clock is needed by the other pipes' eDP/DP */
	if (pipe != PIPE_A)
		val |= DPLL_INTEGRATED_CRI_CLK_VLV;

	I915_WRITE(DPLL(pipe), val);
	POSTING_READ(DPLL(pipe));
}
1561
/* Disable the CHV DPLL, then gate its display clock through DPIO. */
static void chv_disable_pll(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 val;

	/* Make sure the pipe isn't still relying on us */
	assert_pipe_disabled(dev_priv, pipe);

	val = DPLL_SSC_REF_CLK_CHV |
		DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
	/* keep the CRI clock running for the other pipes */
	if (pipe != PIPE_A)
		val |= DPLL_INTEGRATED_CRI_CLK_VLV;

	I915_WRITE(DPLL(pipe), val);
	POSTING_READ(DPLL(pipe));

	vlv_dpio_get(dev_priv);

	/* Disable 10bit clock to display controller */
	val = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port));
	val &= ~DPIO_DCLKP_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port), val);

	vlv_dpio_put(dev_priv);
}
1587
/*
 * Wait (up to 1 ms) for a VLV/CHV digital port to signal readiness in
 * its status register; the bit layout depends on which port it is.
 */
void vlv_wait_port_ready(struct drm_i915_private *dev_priv,
			 struct intel_digital_port *dport,
			 unsigned int expected_mask)
{
	u32 port_mask;
	i915_reg_t dpll_reg;

	switch (dport->base.port) {
	case PORT_B:
		port_mask = DPLL_PORTB_READY_MASK;
		dpll_reg = DPLL(0);
		break;
	case PORT_C:
		port_mask = DPLL_PORTC_READY_MASK;
		dpll_reg = DPLL(0);
		/* port C ready bits sit 4 bits above port B's in DPLL(0) */
		expected_mask <<= 4;
		break;
	case PORT_D:
		port_mask = DPLL_PORTD_READY_MASK;
		dpll_reg = DPIO_PHY_STATUS;
		break;
	default:
		BUG();
	}

	if (intel_de_wait_for_register(dev_priv, dpll_reg,
				       port_mask, expected_mask, 1000))
		WARN(1, "timed out waiting for port %c ready: got 0x%x, expected 0x%x\n",
		     port_name(dport->base.port),
		     I915_READ(dpll_reg) & port_mask, expected_mask);
}
1619
/* Enable the PCH transcoder feeding this CRTC's pipe over FDI. */
static void ironlake_enable_pch_transcoder(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val, pipeconf_val;

	/* Make sure PCH DPLL is enabled */
	assert_shared_dpll_enabled(dev_priv, crtc_state->shared_dpll);

	/* FDI must be feeding us bits for PCH ports */
	assert_fdi_tx_enabled(dev_priv, pipe);
	assert_fdi_rx_enabled(dev_priv, pipe);

	if (HAS_PCH_CPT(dev_priv)) {
		/* Workaround: Set the timing override bit before enabling
		 * the pch transcoder. */
		reg = TRANS_CHICKEN2(pipe);
		val = I915_READ(reg);
		val |= TRANS_CHICKEN2_TIMING_OVERRIDE;
		I915_WRITE(reg, val);
	}

	reg = PCH_TRANSCONF(pipe);
	val = I915_READ(reg);
	pipeconf_val = I915_READ(PIPECONF(pipe));

	if (HAS_PCH_IBX(dev_priv)) {
		/*
		 * Make the BPC in transcoder be consistent with
		 * that in pipeconf reg. For HDMI we must use 8bpc
		 * here for both 8bpc and 12bpc.
		 */
		val &= ~PIPECONF_BPC_MASK;
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
			val |= PIPECONF_8BPC;
		else
			val |= pipeconf_val & PIPECONF_BPC_MASK;
	}

	/* copy the interlace mode from the pipe, SDVO on IBX needs legacy mode */
	val &= ~TRANS_INTERLACE_MASK;
	if ((pipeconf_val & PIPECONF_INTERLACE_MASK) == PIPECONF_INTERLACED_ILK) {
		if (HAS_PCH_IBX(dev_priv) &&
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			val |= TRANS_LEGACY_INTERLACED_ILK;
		else
			val |= TRANS_INTERLACED;
	} else {
		val |= TRANS_PROGRESSIVE;
	}

	I915_WRITE(reg, val | TRANS_ENABLE);
	if (intel_de_wait_for_set(dev_priv, reg, TRANS_STATE_ENABLE, 100))
		DRM_ERROR("failed to enable transcoder %c\n", pipe_name(pipe));
}
1676
/* Enable the single LPT PCH transcoder (hardwired behind FDI on pipe A). */
static void lpt_enable_pch_transcoder(struct drm_i915_private *dev_priv,
				      enum transcoder cpu_transcoder)
{
	u32 val, pipeconf_val;

	/* FDI must be feeding us bits for PCH ports */
	assert_fdi_tx_enabled(dev_priv, (enum pipe) cpu_transcoder);
	assert_fdi_rx_enabled(dev_priv, PIPE_A);

	/* Workaround: set timing override bit before enabling the transcoder. */
	val = I915_READ(TRANS_CHICKEN2(PIPE_A));
	val |= TRANS_CHICKEN2_TIMING_OVERRIDE;
	I915_WRITE(TRANS_CHICKEN2(PIPE_A), val);

	val = TRANS_ENABLE;
	pipeconf_val = I915_READ(PIPECONF(cpu_transcoder));

	/* mirror the pipe's interlace mode */
	if ((pipeconf_val & PIPECONF_INTERLACE_MASK_HSW) ==
	    PIPECONF_INTERLACED_ILK)
		val |= TRANS_INTERLACED;
	else
		val |= TRANS_PROGRESSIVE;

	I915_WRITE(LPT_TRANSCONF, val);
	if (intel_de_wait_for_set(dev_priv, LPT_TRANSCONF,
				  TRANS_STATE_ENABLE, 100))
		DRM_ERROR("Failed to enable PCH transcoder\n");
}
1705
/* Disable the PCH transcoder for @pipe; FDI and ports must be off already. */
static void ironlake_disable_pch_transcoder(struct drm_i915_private *dev_priv,
					    enum pipe pipe)
{
	i915_reg_t reg;
	u32 val;

	/* FDI relies on the transcoder */
	assert_fdi_tx_disabled(dev_priv, pipe);
	assert_fdi_rx_disabled(dev_priv, pipe);

	/* Ports must be off as well */
	assert_pch_ports_disabled(dev_priv, pipe);

	reg = PCH_TRANSCONF(pipe);
	val = I915_READ(reg);
	val &= ~TRANS_ENABLE;
	I915_WRITE(reg, val);

	/* wait for PCH transcoder off, transcoder exit */
	if (intel_de_wait_for_clear(dev_priv, reg, TRANS_STATE_ENABLE, 50))
		DRM_ERROR("failed to disable transcoder %c\n", pipe_name(pipe));

	if (HAS_PCH_CPT(dev_priv)) {
		/* Workaround: Clear the timing override chicken bit again. */
		reg = TRANS_CHICKEN2(pipe);
		val = I915_READ(reg);
		val &= ~TRANS_CHICKEN2_TIMING_OVERRIDE;
		I915_WRITE(reg, val);
	}
}
1735
/* Disable the single LPT PCH transcoder. */
void lpt_disable_pch_transcoder(struct drm_i915_private *dev_priv)
{
	u32 val;

	val = I915_READ(LPT_TRANSCONF);
	val &= ~TRANS_ENABLE;
	I915_WRITE(LPT_TRANSCONF, val);

	/* wait for PCH transcoder off, transcoder exit */
	if (intel_de_wait_for_clear(dev_priv, LPT_TRANSCONF,
				    TRANS_STATE_ENABLE, 50))
		DRM_ERROR("Failed to disable PCH transcoder\n");

	/* Workaround: clear timing override bit. */
	val = I915_READ(TRANS_CHICKEN2(PIPE_A));
	val &= ~TRANS_CHICKEN2_TIMING_OVERRIDE;
	I915_WRITE(TRANS_CHICKEN2(PIPE_A), val);
}
1753
1754 enum pipe intel_crtc_pch_transcoder(struct intel_crtc *crtc)
1755 {
1756 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1757
1758 if (HAS_PCH_LPT(dev_priv))
1759 return PIPE_A;
1760 else
1761 return crtc->pipe;
1762 }
1763
1764 static u32 intel_crtc_max_vblank_count(const struct intel_crtc_state *crtc_state)
1765 {
1766 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
1767
1768
1769
1770
1771
1772 if (IS_I965GM(dev_priv) &&
1773 (crtc_state->output_types & BIT(INTEL_OUTPUT_TVOUT)))
1774 return 0;
1775
1776 if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
1777 return 0xffffffff;
1778 else if (INTEL_GEN(dev_priv) >= 3)
1779 return 0xffffff;
1780 else
1781 return 0;
1782 }
1783
/*
 * Program the max hardware vblank count for the new state and turn
 * vblank interrupt handling back on for the CRTC.
 */
static void intel_crtc_vblank_on(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);

	drm_crtc_set_max_vblank_count(&crtc->base,
				      intel_crtc_max_vblank_count(crtc_state));
	drm_crtc_vblank_on(&crtc->base);
}
1792
/*
 * Enable the pipe. The required PLLs (and, for PCH encoders, the FDI
 * PLLs) must already be running; all planes must still be disabled.
 */
static void intel_enable_pipe(const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = new_crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val;

	DRM_DEBUG_KMS("enabling pipe %c\n", pipe_name(pipe));

	assert_planes_disabled(crtc);

	/*
	 * A pipe without a PLL won't actually be able to drive bits from
	 * a plane. On ILK+ the pipe PLLs are integrated, so we don't
	 * need the check.
	 */
	if (HAS_GMCH(dev_priv)) {
		if (intel_crtc_has_type(new_crtc_state, INTEL_OUTPUT_DSI))
			assert_dsi_pll_enabled(dev_priv);
		else
			assert_pll_enabled(dev_priv, pipe);
	} else {
		if (new_crtc_state->has_pch_encoder) {
			/* if driving the PCH, we need FDI enabled */
			assert_fdi_rx_pll_enabled(dev_priv,
						  intel_crtc_pch_transcoder(crtc));
			assert_fdi_tx_pll_enabled(dev_priv,
						  (enum pipe) cpu_transcoder);
		}
		/* FIXME: assert CPU port conditions for SNB+ */
	}

	trace_intel_pipe_enable(crtc);

	reg = PIPECONF(cpu_transcoder);
	val = I915_READ(reg);
	if (val & PIPECONF_ENABLE) {
		/* we keep both pipes enabled on 830 */
		WARN_ON(!IS_I830(dev_priv));
		return;
	}

	I915_WRITE(reg, val | PIPECONF_ENABLE);
	POSTING_READ(reg);

	/*
	 * Until the pipe starts PIPEDSL reads will return a stale value,
	 * which causes an apparent vblank timestamp jump when PIPEDSL
	 * resets to its proper value. That also messes up the frame count
	 * when it's derived from the timestamps. So let's wait for the
	 * pipe to start properly before we call drm_crtc_vblank_on()
	 */
	if (intel_crtc_max_vblank_count(new_crtc_state) == 0)
		intel_wait_for_pipe_scanline_moving(crtc);
}
1850
/*
 * Disable the pipe and, when the enable bit was actually cleared, wait
 * for the hardware to report the pipe off. Planes must be off first.
 */
static void intel_disable_pipe(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
	enum pipe pipe = crtc->pipe;
	i915_reg_t reg;
	u32 val;

	DRM_DEBUG_KMS("disabling pipe %c\n", pipe_name(pipe));

	/*
	 * Make sure planes won't keep trying to pump pixels to us,
	 * or we might hang the display.
	 */
	assert_planes_disabled(crtc);

	trace_intel_pipe_disable(crtc);

	reg = PIPECONF(cpu_transcoder);
	val = I915_READ(reg);
	if ((val & PIPECONF_ENABLE) == 0)
		return;

	/*
	 * Double wide has implications for planes
	 * so best keep it disabled when not needed.
	 */
	if (old_crtc_state->double_wide)
		val &= ~PIPECONF_DOUBLE_WIDE;

	/* Don't disable pipe or pipe PLLs if needed */
	if (!IS_I830(dev_priv))
		val &= ~PIPECONF_ENABLE;

	I915_WRITE(reg, val);
	if ((val & PIPECONF_ENABLE) == 0)
		intel_wait_for_pipe_off(old_crtc_state);
}
1890
/* Size of one tile in bytes: 2 KiB on gen2, 4 KiB on everything newer. */
static unsigned int intel_tile_size(const struct drm_i915_private *dev_priv)
{
	if (IS_GEN(dev_priv, 2))
		return 2048;

	return 4096;
}
1895
/*
 * Width of one tile row in bytes for the given fb modifier and color
 * plane. Linear surfaces report the whole tile size as their "width".
 */
static unsigned int
intel_tile_width_bytes(const struct drm_framebuffer *fb, int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int cpp = fb->format->cpp[color_plane];

	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		return intel_tile_size(dev_priv);
	case I915_FORMAT_MOD_X_TILED:
		if (IS_GEN(dev_priv, 2))
			return 128;
		else
			return 512;
	case I915_FORMAT_MOD_Y_TILED_CCS:
		/* the CCS aux plane always uses 128 byte tiles */
		if (color_plane == 1)
			return 128;
		/* fall through - main plane is plain Y-tiled */
	case I915_FORMAT_MOD_Y_TILED:
		if (IS_GEN(dev_priv, 2) || HAS_128_BYTE_Y_TILING(dev_priv))
			return 128;
		else
			return 512;
	case I915_FORMAT_MOD_Yf_TILED_CCS:
		/* the CCS aux plane always uses 128 byte tiles */
		if (color_plane == 1)
			return 128;
		/* fall through - main plane is plain Yf-tiled */
	case I915_FORMAT_MOD_Yf_TILED:
		/* Yf tile width depends on bytes per pixel */
		switch (cpp) {
		case 1:
			return 64;
		case 2:
		case 4:
			return 128;
		case 8:
		case 16:
			return 256;
		default:
			MISSING_CASE(cpp);
			return cpp;
		}
		break;
	default:
		MISSING_CASE(fb->modifier);
		return cpp;
	}
}
1943
1944 static unsigned int
1945 intel_tile_height(const struct drm_framebuffer *fb, int color_plane)
1946 {
1947 return intel_tile_size(to_i915(fb->dev)) /
1948 intel_tile_width_bytes(fb, color_plane);
1949 }
1950
1951
1952 static void intel_tile_dims(const struct drm_framebuffer *fb, int color_plane,
1953 unsigned int *tile_width,
1954 unsigned int *tile_height)
1955 {
1956 unsigned int tile_width_bytes = intel_tile_width_bytes(fb, color_plane);
1957 unsigned int cpp = fb->format->cpp[color_plane];
1958
1959 *tile_width = tile_width_bytes / cpp;
1960 *tile_height = intel_tile_size(to_i915(fb->dev)) / tile_width_bytes;
1961 }
1962
/* Round @height up to a whole number of tile rows for this color plane. */
unsigned int
intel_fb_align_height(const struct drm_framebuffer *fb,
		      int color_plane, unsigned int height)
{
	return ALIGN(height, intel_tile_height(fb, color_plane));
}
1971
1972 unsigned int intel_rotation_info_size(const struct intel_rotation_info *rot_info)
1973 {
1974 unsigned int size = 0;
1975 int i;
1976
1977 for (i = 0 ; i < ARRAY_SIZE(rot_info->plane); i++)
1978 size += rot_info->plane[i].width * rot_info->plane[i].height;
1979
1980 return size;
1981 }
1982
1983 unsigned int intel_remapped_info_size(const struct intel_remapped_info *rem_info)
1984 {
1985 unsigned int size = 0;
1986 int i;
1987
1988 for (i = 0 ; i < ARRAY_SIZE(rem_info->plane); i++)
1989 size += rem_info->plane[i].width * rem_info->plane[i].height;
1990
1991 return size;
1992 }
1993
1994 static void
1995 intel_fill_fb_ggtt_view(struct i915_ggtt_view *view,
1996 const struct drm_framebuffer *fb,
1997 unsigned int rotation)
1998 {
1999 view->type = I915_GGTT_VIEW_NORMAL;
2000 if (drm_rotation_90_or_270(rotation)) {
2001 view->type = I915_GGTT_VIEW_ROTATED;
2002 view->rotated = to_intel_framebuffer(fb)->rot_info;
2003 }
2004 }
2005
/* GGTT alignment required for the cursor surface on this platform. */
static unsigned int intel_cursor_alignment(const struct drm_i915_private *dev_priv)
{
	unsigned int alignment = 4 * 1024;

	if (IS_I830(dev_priv))
		alignment = 16 * 1024;
	else if (IS_I85X(dev_priv))
		alignment = 256;
	else if (IS_I845G(dev_priv) || IS_I865G(dev_priv))
		alignment = 32;

	return alignment;
}
2017
/* GGTT alignment required for a linear scanout surface on this platform. */
static unsigned int intel_linear_alignment(const struct drm_i915_private *dev_priv)
{
	unsigned int alignment = 0;

	if (INTEL_GEN(dev_priv) >= 9)
		alignment = 256 * 1024;
	else if (IS_I965G(dev_priv) || IS_I965GM(dev_priv) ||
		 IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		alignment = 128 * 1024;
	else if (INTEL_GEN(dev_priv) >= 4)
		alignment = 4 * 1024;

	return alignment;
}
2030
/*
 * Minimum GGTT alignment (in bytes) for a scanout surface with the given
 * fb modifier.  color_plane 1 (the CCS/AUX surface on CCS modifiers — see
 * ccs_formats) only needs page alignment.
 */
static unsigned int intel_surf_alignment(const struct drm_framebuffer *fb,
					 int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);

	/* AUX surface: 4K page alignment is sufficient */
	if (color_plane == 1)
		return 4096;

	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		return intel_linear_alignment(dev_priv);
	case I915_FORMAT_MOD_X_TILED:
		if (INTEL_GEN(dev_priv) >= 9)
			return 256 * 1024;
		return 0;
	case I915_FORMAT_MOD_Y_TILED_CCS:
	case I915_FORMAT_MOD_Yf_TILED_CCS:
	case I915_FORMAT_MOD_Y_TILED:
	case I915_FORMAT_MOD_Yf_TILED:
		/* Y/Yf tiled surfaces need 1 MiB alignment */
		return 1 * 1024 * 1024;
	default:
		MISSING_CASE(fb->modifier);
		return 0;
	}
}
2057
2058 static bool intel_plane_uses_fence(const struct intel_plane_state *plane_state)
2059 {
2060 struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
2061 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
2062
2063 return INTEL_GEN(dev_priv) < 4 ||
2064 (plane->has_fbc &&
2065 plane_state->view.type == I915_GGTT_VIEW_NORMAL);
2066 }
2067
/*
 * Pin an fb's backing object into the GGTT for scanout and, when
 * @uses_fence is set and the vma is map-and-fenceable, attach a fence
 * register to it.
 *
 * Returns the pinned vma (with an extra reference taken via
 * i915_vma_get()) on success, or an ERR_PTR.  PLANE_HAS_FENCE is set in
 * @out_flags when a fence was actually attached.  Caller must hold
 * struct_mutex and later release via intel_unpin_fb_vma().
 */
struct i915_vma *
intel_pin_and_fence_fb_obj(struct drm_framebuffer *fb,
			   const struct i915_ggtt_view *view,
			   bool uses_fence,
			   unsigned long *out_flags)
{
	struct drm_device *dev = fb->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	intel_wakeref_t wakeref;
	struct i915_vma *vma;
	unsigned int pinctl;
	u32 alignment;

	WARN_ON(!mutex_is_locked(&dev->struct_mutex));

	alignment = intel_surf_alignment(fb, 0);

	/*
	 * VT-d scanout workaround: bump the alignment to at least 256 KiB
	 * on affected platforms.
	 */
	if (intel_scanout_needs_vtd_wa(dev_priv) && alignment < 256 * 1024)
		alignment = 256 * 1024;

	/*
	 * Hold a runtime pm wakeref across the GGTT binding and fence
	 * setup below so the global GTT state cannot be powered down
	 * underneath us.
	 */
	wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
	i915_gem_object_lock(obj);

	atomic_inc(&dev_priv->gpu_error.pending_fb_pin);

	pinctl = 0;

	/*
	 * On GMCH platforms the scanout vma is pinned into the mappable
	 * aperture.  NOTE(review): presumably required because the display
	 * engine there accesses the fb through the aperture — confirm.
	 */
	if (HAS_GMCH(dev_priv))
		pinctl |= PIN_MAPPABLE;

	vma = i915_gem_object_pin_to_display_plane(obj,
						   alignment, view, pinctl);
	if (IS_ERR(vma))
		goto err;

	if (uses_fence && i915_vma_is_map_and_fenceable(vma)) {
		int ret;

		/*
		 * Try to install a fence.  A failure is only fatal on
		 * pre-gen4 hardware; gen4+ continues to scan out unfenced.
		 */
		ret = i915_vma_pin_fence(vma);
		if (ret != 0 && INTEL_GEN(dev_priv) < 4) {
			i915_gem_object_unpin_from_display_plane(vma);
			vma = ERR_PTR(ret);
			goto err;
		}

		if (ret == 0 && vma->fence)
			*out_flags |= PLANE_HAS_FENCE;
	}

	/* Extra reference for the caller; dropped in intel_unpin_fb_vma(). */
	i915_vma_get(vma);
err:
	atomic_dec(&dev_priv->gpu_error.pending_fb_pin);

	i915_gem_object_unlock(obj);
	intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
	return vma;
}
2161
/*
 * Undo intel_pin_and_fence_fb_obj(): release the fence if one was
 * attached (flags has PLANE_HAS_FENCE), unpin from the display plane
 * and drop the vma reference.  Caller must hold struct_mutex.
 */
void intel_unpin_fb_vma(struct i915_vma *vma, unsigned long flags)
{
	lockdep_assert_held(&vma->vm->i915->drm.struct_mutex);

	i915_gem_object_lock(vma->obj);
	if (flags & PLANE_HAS_FENCE)
		i915_vma_unpin_fence(vma);
	i915_gem_object_unpin_from_display_plane(vma);
	i915_gem_object_unlock(vma->obj);

	i915_vma_put(vma);
}
2174
2175 static int intel_fb_pitch(const struct drm_framebuffer *fb, int color_plane,
2176 unsigned int rotation)
2177 {
2178 if (drm_rotation_90_or_270(rotation))
2179 return to_intel_framebuffer(fb)->rotated[color_plane].pitch;
2180 else
2181 return fb->pitches[color_plane];
2182 }
2183
2184
2185
2186
2187
2188
2189
2190 u32 intel_fb_xy_to_linear(int x, int y,
2191 const struct intel_plane_state *state,
2192 int color_plane)
2193 {
2194 const struct drm_framebuffer *fb = state->base.fb;
2195 unsigned int cpp = fb->format->cpp[color_plane];
2196 unsigned int pitch = state->color_plane[color_plane].stride;
2197
2198 return y * pitch + x * cpp;
2199 }
2200
2201
2202
2203
2204
2205
/*
 * Add the plane's precomputed color-plane x/y offsets to the caller's
 * coordinates, translating them into fb-relative positions.
 */
void intel_add_fb_offsets(int *x, int *y,
			  const struct intel_plane_state *state,
			  int color_plane)

{
	*x += state->color_plane[color_plane].x;
	*y += state->color_plane[color_plane].y;
}
2214
/*
 * Adjust the tile-aligned offset from old_offset to new_offset by
 * folding the difference into the x/y coordinates.  Both offsets must
 * be tile-size aligned and new_offset <= old_offset.  Returns
 * new_offset.
 */
static u32 intel_adjust_tile_offset(int *x, int *y,
				    unsigned int tile_width,
				    unsigned int tile_height,
				    unsigned int tile_size,
				    unsigned int pitch_tiles,
				    u32 old_offset,
				    u32 new_offset)
{
	unsigned int pitch_pixels = pitch_tiles * tile_width;
	unsigned int tiles;

	WARN_ON(old_offset & (tile_size - 1));
	WARN_ON(new_offset & (tile_size - 1));
	WARN_ON(new_offset > old_offset);

	tiles = (old_offset - new_offset) / tile_size;

	/* distribute the removed whole tiles over rows (y) and columns (x) */
	*y += tiles / pitch_tiles * tile_height;
	*x += tiles % pitch_tiles * tile_width;

	/* minimize x in favor of y */
	*y += *x / pitch_pixels * tile_height;
	*x %= pitch_pixels;

	return new_offset;
}
2241
/*
 * Whether the surface is laid out linearly.  @color_plane is currently
 * unused; only the fb modifier decides.
 */
static bool is_surface_linear(u64 modifier, int color_plane)
{
	return modifier == DRM_FORMAT_MOD_LINEAR;
}
2246
/*
 * Adjust (*x, *y) so that they address the same pixel relative to
 * new_offset that they previously addressed relative to old_offset.
 * Handles both tiled surfaces (tile-granular offsets) and linear
 * surfaces (byte-granular offsets).  Returns new_offset.
 */
static u32 intel_adjust_aligned_offset(int *x, int *y,
				       const struct drm_framebuffer *fb,
				       int color_plane,
				       unsigned int rotation,
				       unsigned int pitch,
				       u32 old_offset, u32 new_offset)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int cpp = fb->format->cpp[color_plane];

	WARN_ON(new_offset > old_offset);

	if (!is_surface_linear(fb->modifier, color_plane)) {
		unsigned int tile_size, tile_width, tile_height;
		unsigned int pitch_tiles;

		tile_size = intel_tile_size(dev_priv);
		intel_tile_dims(fb, color_plane, &tile_width, &tile_height);

		if (drm_rotation_90_or_270(rotation)) {
			/* pitch is given in rotated units (rows of tiles) here */
			pitch_tiles = pitch / tile_height;
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = pitch / (tile_width * cpp);
		}

		intel_adjust_tile_offset(x, y, tile_width, tile_height,
					 tile_size, pitch_tiles,
					 old_offset, new_offset);
	} else {
		/* linear: convert the byte delta straight back into x/y */
		old_offset += *y * pitch + *x * cpp;

		*y = (old_offset - new_offset) / pitch;
		*x = ((old_offset - new_offset) - *y * pitch) / cpp;
	}

	return new_offset;
}
2285
2286
2287
2288
2289
2290 static u32 intel_plane_adjust_aligned_offset(int *x, int *y,
2291 const struct intel_plane_state *state,
2292 int color_plane,
2293 u32 old_offset, u32 new_offset)
2294 {
2295 return intel_adjust_aligned_offset(x, y, state->base.fb, color_plane,
2296 state->base.rotation,
2297 state->color_plane[color_plane].stride,
2298 old_offset, new_offset);
2299 }
2300
2301
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
/*
 * Compute the surface byte offset for pixel (*x, *y) of the given color
 * plane, rounded down to @alignment.  The bit math below treats
 * @alignment as a power of two (or 0 for no alignment).  The pixel's
 * residual position relative to the returned aligned offset is folded
 * back into *x/*y.
 */
static u32 intel_compute_aligned_offset(struct drm_i915_private *dev_priv,
					int *x, int *y,
					const struct drm_framebuffer *fb,
					int color_plane,
					unsigned int pitch,
					unsigned int rotation,
					u32 alignment)
{
	unsigned int cpp = fb->format->cpp[color_plane];
	u32 offset, offset_aligned;

	/* turn the alignment into a mask; 0 stays 0 (no alignment) */
	if (alignment)
		alignment--;

	if (!is_surface_linear(fb->modifier, color_plane)) {
		unsigned int tile_size, tile_width, tile_height;
		unsigned int tile_rows, tiles, pitch_tiles;

		tile_size = intel_tile_size(dev_priv);
		intel_tile_dims(fb, color_plane, &tile_width, &tile_height);

		if (drm_rotation_90_or_270(rotation)) {
			pitch_tiles = pitch / tile_height;
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = pitch / (tile_width * cpp);
		}

		/* split x/y into whole tiles plus intra-tile remainder */
		tile_rows = *y / tile_height;
		*y %= tile_height;

		tiles = *x / tile_width;
		*x %= tile_width;

		offset = (tile_rows * pitch_tiles + tiles) * tile_size;
		offset_aligned = offset & ~alignment;

		/* fold the tiles dropped by alignment back into x/y */
		intel_adjust_tile_offset(x, y, tile_width, tile_height,
					 tile_size, pitch_tiles,
					 offset, offset_aligned);
	} else {
		offset = *y * pitch + *x * cpp;
		offset_aligned = offset & ~alignment;

		/* the bytes below the alignment boundary become x/y */
		*y = (offset & alignment) / pitch;
		*x = ((offset & alignment) - *y * pitch) / cpp;
	}

	return offset_aligned;
}
2365
2366 static u32 intel_plane_compute_aligned_offset(int *x, int *y,
2367 const struct intel_plane_state *state,
2368 int color_plane)
2369 {
2370 struct intel_plane *intel_plane = to_intel_plane(state->base.plane);
2371 struct drm_i915_private *dev_priv = to_i915(intel_plane->base.dev);
2372 const struct drm_framebuffer *fb = state->base.fb;
2373 unsigned int rotation = state->base.rotation;
2374 int pitch = state->color_plane[color_plane].stride;
2375 u32 alignment;
2376
2377 if (intel_plane->id == PLANE_CURSOR)
2378 alignment = intel_cursor_alignment(dev_priv);
2379 else
2380 alignment = intel_surf_alignment(fb, color_plane);
2381
2382 return intel_compute_aligned_offset(dev_priv, x, y, fb, color_plane,
2383 pitch, rotation, alignment);
2384 }
2385
2386
/*
 * Convert fb->offsets[color_plane] (a byte offset) into an equivalent
 * (x, y) pixel offset from the start of the buffer.  Rejects tiled fbs
 * whose offset is not tile-size aligned, and linear ranges that would
 * overflow u32.
 */
static int intel_fb_offset_to_xy(int *x, int *y,
				 const struct drm_framebuffer *fb,
				 int color_plane)
{
	struct drm_i915_private *dev_priv = to_i915(fb->dev);
	unsigned int height;

	if (fb->modifier != DRM_FORMAT_MOD_LINEAR &&
	    fb->offsets[color_plane] % intel_tile_size(dev_priv)) {
		DRM_DEBUG_KMS("Misaligned offset 0x%08x for color plane %d\n",
			      fb->offsets[color_plane], color_plane);
		return -EINVAL;
	}

	height = drm_framebuffer_plane_height(fb->height, fb, color_plane);
	height = ALIGN(height, intel_tile_height(fb, color_plane));

	/* Catch potential overflows early */
	if (add_overflows_t(u32, mul_u32_u32(height, fb->pitches[color_plane]),
			    fb->offsets[color_plane])) {
		DRM_DEBUG_KMS("Bad offset 0x%08x or pitch %d for color plane %d\n",
			      fb->offsets[color_plane], fb->pitches[color_plane],
			      color_plane);
		return -ERANGE;
	}

	*x = 0;
	*y = 0;

	/* fold the whole byte offset into x/y, starting from (0,0) */
	intel_adjust_aligned_offset(x, y,
				    fb, color_plane, DRM_MODE_ROTATE_0,
				    fb->pitches[color_plane],
				    fb->offsets[color_plane], 0);

	return 0;
}
2423
2424 static unsigned int intel_fb_modifier_to_tiling(u64 fb_modifier)
2425 {
2426 switch (fb_modifier) {
2427 case I915_FORMAT_MOD_X_TILED:
2428 return I915_TILING_X;
2429 case I915_FORMAT_MOD_Y_TILED:
2430 case I915_FORMAT_MOD_Y_TILED_CCS:
2431 return I915_TILING_Y;
2432 default:
2433 return I915_TILING_NONE;
2434 }
2435 }
2436
2437
2438
2439
2440
2441
2442
2443
2444
2445
2446
2447
2448
2449
2450
/*
 * Format descriptions for the CCS modifiers: plane 0 is the 4-byte-per-
 * pixel main surface, plane 1 the color control surface with 1 byte per
 * 8x16 (hsub x vsub) block of main-surface pixels.
 */
static const struct drm_format_info ccs_formats[] = {
	{ .format = DRM_FORMAT_XRGB8888, .depth = 24, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, },
	{ .format = DRM_FORMAT_XBGR8888, .depth = 24, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, },
	{ .format = DRM_FORMAT_ARGB8888, .depth = 32, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, .has_alpha = true, },
	{ .format = DRM_FORMAT_ABGR8888, .depth = 32, .num_planes = 2,
	  .cpp = { 4, 1, }, .hsub = 8, .vsub = 16, .has_alpha = true, },
};
2461
2462 static const struct drm_format_info *
2463 lookup_format_info(const struct drm_format_info formats[],
2464 int num_formats, u32 format)
2465 {
2466 int i;
2467
2468 for (i = 0; i < num_formats; i++) {
2469 if (formats[i].format == format)
2470 return &formats[i];
2471 }
2472
2473 return NULL;
2474 }
2475
2476 static const struct drm_format_info *
2477 intel_get_format_info(const struct drm_mode_fb_cmd2 *cmd)
2478 {
2479 switch (cmd->modifier[0]) {
2480 case I915_FORMAT_MOD_Y_TILED_CCS:
2481 case I915_FORMAT_MOD_Yf_TILED_CCS:
2482 return lookup_format_info(ccs_formats,
2483 ARRAY_SIZE(ccs_formats),
2484 cmd->pixel_format);
2485 default:
2486 return NULL;
2487 }
2488 }
2489
2490 bool is_ccs_modifier(u64 modifier)
2491 {
2492 return modifier == I915_FORMAT_MOD_Y_TILED_CCS ||
2493 modifier == I915_FORMAT_MOD_Yf_TILED_CCS;
2494 }
2495
/*
 * Max fb stride for the given format/modifier, queried from pipe A's
 * primary plane.  NOTE(review): this assumes the pipe A primary plane
 * has the widest stride limits of all planes — confirm.
 */
u32 intel_plane_fb_max_stride(struct drm_i915_private *dev_priv,
			      u32 pixel_format, u64 modifier)
{
	struct intel_crtc *crtc;
	struct intel_plane *plane;

	/* No pipe A (e.g. display fused off): no stride to report. */
	crtc = intel_get_crtc_for_pipe(dev_priv, PIPE_A);
	if (!crtc)
		return 0;

	plane = to_intel_plane(crtc->base.primary);

	return plane->max_stride(plane, pixel_format, modifier,
				 DRM_MODE_ROTATE_0);
}
2515
2516 static
2517 u32 intel_fb_max_stride(struct drm_i915_private *dev_priv,
2518 u32 pixel_format, u64 modifier)
2519 {
2520
2521
2522
2523
2524
2525
2526 if (!is_ccs_modifier(modifier)) {
2527 if (INTEL_GEN(dev_priv) >= 7)
2528 return 256*1024;
2529 else if (INTEL_GEN(dev_priv) >= 4)
2530 return 128*1024;
2531 }
2532
2533 return intel_plane_fb_max_stride(dev_priv, pixel_format, modifier);
2534 }
2535
2536 static u32
2537 intel_fb_stride_alignment(const struct drm_framebuffer *fb, int color_plane)
2538 {
2539 struct drm_i915_private *dev_priv = to_i915(fb->dev);
2540
2541 if (fb->modifier == DRM_FORMAT_MOD_LINEAR) {
2542 u32 max_stride = intel_plane_fb_max_stride(dev_priv,
2543 fb->format->format,
2544 fb->modifier);
2545
2546
2547
2548
2549
2550 if (fb->pitches[color_plane] > max_stride)
2551 return intel_tile_size(dev_priv);
2552 else
2553 return 64;
2554 } else {
2555 return intel_tile_width_bytes(fb, color_plane);
2556 }
2557 }
2558
/*
 * Whether this plane's fb may be scanned out through a remapped/rotated
 * GTT view instead of the fb's own layout.
 */
bool intel_plane_can_remap(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int i;

	/* The cursor plane never uses a remapped view. */
	if (plane->id == PLANE_CURSOR)
		return false;

	/*
	 * NOTE(review): pre-gen4 is excluded here, presumably because the
	 * old display engine can't scan out of a remapped mapping — confirm.
	 */
	if (INTEL_GEN(dev_priv) < 4)
		return false;

	/*
	 * CCS fbs are never remapped — presumably the AUX surface ties
	 * the layout to the original mapping.
	 */
	if (is_ccs_modifier(fb->modifier))
		return false;

	/* Linear fbs can only be remapped if every pitch is tile aligned. */
	if (fb->modifier == DRM_FORMAT_MOD_LINEAR) {
		unsigned int alignment = intel_tile_size(dev_priv) - 1;

		for (i = 0; i < fb->format->num_planes; i++) {
			if (fb->pitches[i] & alignment)
				return false;
		}
	}

	return true;
}
2598
/*
 * Whether the plane must use a remapped GTT view: true when the plane
 * is visible, remapping is possible, and the fb stride (for the current
 * rotation) exceeds what the hardware plane supports directly.
 */
static bool intel_plane_needs_remap(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	unsigned int rotation = plane_state->base.rotation;
	u32 stride, max_stride;

	/* An invisible plane isn't scanned out, so no remap is needed. */
	if (!plane_state->base.visible)
		return false;

	if (!intel_plane_can_remap(plane_state))
		return false;

	/*
	 * Only remap when the fb stride is too big for the plane;
	 * otherwise the normal view can be used as-is.
	 */
	stride = intel_fb_pitch(fb, 0, rotation);
	max_stride = plane->max_stride(plane, fb->format->format,
				       fb->modifier, rotation);

	return stride > max_stride;
}
2626
/*
 * Precompute everything needed to scan out an fb: per-plane normal-view
 * x/y offsets, the rotated view layout (rotation info, rotated pitches
 * and x/y offsets), and a validation that the fb fits in its backing
 * object.  Returns 0 on success or a negative error code.
 */
static int
intel_fill_fb_info(struct drm_i915_private *dev_priv,
		   struct drm_framebuffer *fb)
{
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	struct intel_rotation_info *rot_info = &intel_fb->rot_info;
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	u32 gtt_offset_rotated = 0;
	unsigned int max_size = 0;
	int i, num_planes = fb->format->num_planes;
	unsigned int tile_size = intel_tile_size(dev_priv);

	for (i = 0; i < num_planes; i++) {
		unsigned int width, height;
		unsigned int cpp, size;
		u32 offset;
		int x, y;
		int ret;

		cpp = fb->format->cpp[i];
		width = drm_framebuffer_plane_width(fb->width, fb, i);
		height = drm_framebuffer_plane_height(fb->height, fb, i);

		/* translate the plane's byte offset into x/y coordinates */
		ret = intel_fb_offset_to_xy(&x, &y, fb, i);
		if (ret) {
			DRM_DEBUG_KMS("bad fb plane %d offset: 0x%x\n",
				      i, fb->offsets[i]);
			return ret;
		}

		if (is_ccs_modifier(fb->modifier) && i == 1) {
			int hsub = fb->format->hsub;
			int vsub = fb->format->vsub;
			int tile_width, tile_height;
			int main_x, main_y;
			int ccs_x, ccs_y;

			intel_tile_dims(fb, i, &tile_width, &tile_height);
			tile_width *= hsub;
			tile_height *= vsub;

			ccs_x = (x * hsub) % tile_width;
			ccs_y = (y * vsub) % tile_height;
			main_x = intel_fb->normal[0].x % tile_width;
			main_y = intel_fb->normal[0].y % tile_height;

			/*
			 * The AUX surface's intra-tile x/y must match the
			 * main surface's, otherwise the layouts diverge.
			 */
			if (main_x != ccs_x || main_y != ccs_y) {
				DRM_DEBUG_KMS("Bad CCS x/y (main %d,%d ccs %d,%d) full (main %d,%d ccs %d,%d)\n",
					      main_x, main_y,
					      ccs_x, ccs_y,
					      intel_fb->normal[0].x,
					      intel_fb->normal[0].y,
					      x, y);
				return -EINVAL;
			}
		}

		/*
		 * For a tiled object the first row of the first plane must
		 * fit inside the declared pitch, otherwise the x offset
		 * computed from fb->offsets[] is bogus.
		 */
		if (i == 0 && i915_gem_object_is_tiled(obj) &&
		    (x + width) * cpp > fb->pitches[i]) {
			DRM_DEBUG_KMS("bad fb plane %d offset: 0x%x\n",
				      i, fb->offsets[i]);
			return -EINVAL;
		}

		/*
		 * First pixel of the plane from the start of the normal
		 * GTT mapping.
		 */
		intel_fb->normal[i].x = x;
		intel_fb->normal[i].y = y;

		/* tile-size aligned offset of the plane, in whole tiles */
		offset = intel_compute_aligned_offset(dev_priv, &x, &y, fb, i,
						      fb->pitches[i],
						      DRM_MODE_ROTATE_0,
						      tile_size);
		offset /= tile_size;

		if (!is_surface_linear(fb->modifier, i)) {
			unsigned int tile_width, tile_height;
			unsigned int pitch_tiles;
			struct drm_rect r;

			intel_tile_dims(fb, i, &tile_width, &tile_height);

			/* describe the plane for the rotated GGTT view */
			rot_info->plane[i].offset = offset;
			rot_info->plane[i].stride = DIV_ROUND_UP(fb->pitches[i], tile_width * cpp);
			rot_info->plane[i].width = DIV_ROUND_UP(x + width, tile_width);
			rot_info->plane[i].height = DIV_ROUND_UP(y + height, tile_height);

			intel_fb->rotated[i].pitch =
				rot_info->plane[i].height * tile_height;

			/* size of the plane in tiles */
			size = rot_info->plane[i].stride * rot_info->plane[i].height;

			/* an extra tile if the plane doesn't start tile-aligned */
			if (x != 0)
				size++;

			/* rotate the x/y offsets to match the GTT view */
			r.x1 = x;
			r.y1 = y;
			r.x2 = x + width;
			r.y2 = y + height;
			drm_rect_rotate(&r,
					rot_info->plane[i].width * tile_width,
					rot_info->plane[i].height * tile_height,
					DRM_MODE_ROTATE_270);
			x = r.x1;
			y = r.y1;

			/* rotate the tile dimensions to match the GTT view */
			pitch_tiles = intel_fb->rotated[i].pitch / tile_height;
			swap(tile_width, tile_height);

			/*
			 * Push the GTT offset of the rotated plane into the
			 * x/y offsets (we only keep x/y, not the offset).
			 */
			intel_adjust_tile_offset(&x, &y,
						 tile_width, tile_height,
						 tile_size, pitch_tiles,
						 gtt_offset_rotated * tile_size, 0);

			gtt_offset_rotated += rot_info->plane[i].width * rot_info->plane[i].height;

			/*
			 * First pixel of the plane from the start of the
			 * rotated GTT mapping.
			 */
			intel_fb->rotated[i].x = x;
			intel_fb->rotated[i].y = y;
		} else {
			size = DIV_ROUND_UP((y + height) * fb->pitches[i] +
					    x * cpp, tile_size);
		}

		/* how many tiles in total needed in the bo */
		max_size = max(max_size, offset + size);
	}

	if (mul_u32_u32(max_size, tile_size) > obj->base.size) {
		DRM_DEBUG_KMS("fb too big for bo (need %llu bytes, have %zu bytes)\n",
			      mul_u32_u32(max_size, tile_size), obj->base.size);
		return -EINVAL;
	}

	return 0;
}
2791
/*
 * Build a remapped (or rotated) GTT view covering just the plane's src
 * viewport, and rewrite the plane state (view, strides, x/y offsets,
 * src rect) to scan out through it.  Used when the fb stride exceeds
 * the hardware plane limit; CCS fbs are not supported here (WARN).
 */
static void
intel_plane_remap_gtt(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	struct drm_framebuffer *fb = plane_state->base.fb;
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);
	struct intel_rotation_info *info = &plane_state->view.rotated;
	unsigned int rotation = plane_state->base.rotation;
	int i, num_planes = fb->format->num_planes;
	unsigned int tile_size = intel_tile_size(dev_priv);
	unsigned int src_x, src_y;
	unsigned int src_w, src_h;
	u32 gtt_offset = 0;

	memset(&plane_state->view, 0, sizeof(plane_state->view));
	plane_state->view.type = drm_rotation_90_or_270(rotation) ?
		I915_GGTT_VIEW_ROTATED : I915_GGTT_VIEW_REMAPPED;

	src_x = plane_state->base.src.x1 >> 16;
	src_y = plane_state->base.src.y1 >> 16;
	src_w = drm_rect_width(&plane_state->base.src) >> 16;
	src_h = drm_rect_height(&plane_state->base.src) >> 16;

	WARN_ON(is_ccs_modifier(fb->modifier));

	/* Make src coordinates relative to the viewport */
	drm_rect_translate(&plane_state->base.src,
			   -(src_x << 16), -(src_y << 16));

	/* Rotate src coordinates to match the rotated GTT view */
	if (drm_rotation_90_or_270(rotation))
		drm_rect_rotate(&plane_state->base.src,
				src_w << 16, src_h << 16,
				DRM_MODE_ROTATE_270);

	for (i = 0; i < num_planes; i++) {
		unsigned int hsub = i ? fb->format->hsub : 1;
		unsigned int vsub = i ? fb->format->vsub : 1;
		unsigned int cpp = fb->format->cpp[i];
		unsigned int tile_width, tile_height;
		unsigned int width, height;
		unsigned int pitch_tiles;
		unsigned int x, y;
		u32 offset;

		intel_tile_dims(fb, i, &tile_width, &tile_height);

		/* viewport position/size in this (possibly subsampled) plane */
		x = src_x / hsub;
		y = src_y / vsub;
		width = src_w / hsub;
		height = src_h / vsub;

		/*
		 * First pixel of the src viewport from the start of the
		 * normal GTT mapping.
		 */
		x += intel_fb->normal[i].x;
		y += intel_fb->normal[i].y;

		offset = intel_compute_aligned_offset(dev_priv, &x, &y,
						      fb, i, fb->pitches[i],
						      DRM_MODE_ROTATE_0, tile_size);
		offset /= tile_size;

		/* describe the viewport for the remapped view */
		info->plane[i].offset = offset;
		info->plane[i].stride = DIV_ROUND_UP(fb->pitches[i],
						     tile_width * cpp);
		info->plane[i].width = DIV_ROUND_UP(x + width, tile_width);
		info->plane[i].height = DIV_ROUND_UP(y + height, tile_height);

		if (drm_rotation_90_or_270(rotation)) {
			struct drm_rect r;

			/* rotate the x/y offsets to match the GTT view */
			r.x1 = x;
			r.y1 = y;
			r.x2 = x + width;
			r.y2 = y + height;
			drm_rect_rotate(&r,
					info->plane[i].width * tile_width,
					info->plane[i].height * tile_height,
					DRM_MODE_ROTATE_270);
			x = r.x1;
			y = r.y1;

			pitch_tiles = info->plane[i].height;
			plane_state->color_plane[i].stride = pitch_tiles * tile_height;

			/* rotate the tile dimensions to match the GTT view */
			swap(tile_width, tile_height);
		} else {
			pitch_tiles = info->plane[i].width;
			plane_state->color_plane[i].stride = pitch_tiles * tile_width * cpp;
		}

		/*
		 * Push the GTT offset of this plane's slice into x/y (the
		 * scanout offset itself stays 0).
		 */
		intel_adjust_tile_offset(&x, &y,
					 tile_width, tile_height,
					 tile_size, pitch_tiles,
					 gtt_offset * tile_size, 0);

		gtt_offset += info->plane[i].width * info->plane[i].height;

		plane_state->color_plane[i].offset = 0;
		plane_state->color_plane[i].x = x;
		plane_state->color_plane[i].y = y;
	}
}
2904
/*
 * Set up the plane's GGTT view and per-color-plane scanout parameters
 * (stride, offset, x/y), remapping the fb when its stride exceeds the
 * plane limit.  Returns 0 or a negative error from the stride check.
 */
static int
intel_plane_compute_gtt(struct intel_plane_state *plane_state)
{
	const struct intel_framebuffer *fb =
		to_intel_framebuffer(plane_state->base.fb);
	unsigned int rotation = plane_state->base.rotation;
	int i, num_planes;

	/* No fb: nothing to compute. */
	if (!fb)
		return 0;

	num_planes = fb->base.format->num_planes;

	if (intel_plane_needs_remap(plane_state)) {
		intel_plane_remap_gtt(plane_state);

		/*
		 * Even remapping may not overcome the stride limit, so
		 * re-validate the resulting stride.
		 */
		return intel_plane_check_stride(plane_state);
	}

	intel_fill_fb_ggtt_view(&plane_state->view, &fb->base, rotation);

	/* use the fb's precomputed normal/rotated layout directly */
	for (i = 0; i < num_planes; i++) {
		plane_state->color_plane[i].stride = intel_fb_pitch(&fb->base, i, rotation);
		plane_state->color_plane[i].offset = 0;

		if (drm_rotation_90_or_270(rotation)) {
			plane_state->color_plane[i].x = fb->rotated[i].x;
			plane_state->color_plane[i].y = fb->rotated[i].y;
		} else {
			plane_state->color_plane[i].x = fb->normal[i].x;
			plane_state->color_plane[i].y = fb->normal[i].y;
		}
	}

	/* Rotate src coordinates to match the rotated GTT view */
	if (drm_rotation_90_or_270(rotation))
		drm_rect_rotate(&plane_state->base.src,
				fb->base.width << 16, fb->base.height << 16,
				DRM_MODE_ROTATE_270);

	return intel_plane_check_stride(plane_state);
}
2953
2954 static int i9xx_format_to_fourcc(int format)
2955 {
2956 switch (format) {
2957 case DISPPLANE_8BPP:
2958 return DRM_FORMAT_C8;
2959 case DISPPLANE_BGRX555:
2960 return DRM_FORMAT_XRGB1555;
2961 case DISPPLANE_BGRX565:
2962 return DRM_FORMAT_RGB565;
2963 default:
2964 case DISPPLANE_BGRX888:
2965 return DRM_FORMAT_XRGB8888;
2966 case DISPPLANE_RGBX888:
2967 return DRM_FORMAT_XBGR8888;
2968 case DISPPLANE_BGRX101010:
2969 return DRM_FORMAT_XRGB2101010;
2970 case DISPPLANE_RGBX101010:
2971 return DRM_FORMAT_XBGR2101010;
2972 }
2973 }
2974
/*
 * Translate a PLANE_CTL format field (plus the rgb_order and alpha
 * control bits) into the corresponding DRM fourcc.  Unrecognized values
 * fall through to the XRGB8888 family.
 */
int skl_format_to_fourcc(int format, bool rgb_order, bool alpha)
{
	switch (format) {
	case PLANE_CTL_FORMAT_RGB_565:
		return DRM_FORMAT_RGB565;
	case PLANE_CTL_FORMAT_NV12:
		return DRM_FORMAT_NV12;
	case PLANE_CTL_FORMAT_P010:
		return DRM_FORMAT_P010;
	case PLANE_CTL_FORMAT_P012:
		return DRM_FORMAT_P012;
	case PLANE_CTL_FORMAT_P016:
		return DRM_FORMAT_P016;
	case PLANE_CTL_FORMAT_Y210:
		return DRM_FORMAT_Y210;
	case PLANE_CTL_FORMAT_Y212:
		return DRM_FORMAT_Y212;
	case PLANE_CTL_FORMAT_Y216:
		return DRM_FORMAT_Y216;
	case PLANE_CTL_FORMAT_Y410:
		return DRM_FORMAT_XVYU2101010;
	case PLANE_CTL_FORMAT_Y412:
		return DRM_FORMAT_XVYU12_16161616;
	case PLANE_CTL_FORMAT_Y416:
		return DRM_FORMAT_XVYU16161616;
	default:
	case PLANE_CTL_FORMAT_XRGB_8888:
		/* rgb_order selects BGR vs RGB, alpha selects A vs X */
		if (rgb_order) {
			if (alpha)
				return DRM_FORMAT_ABGR8888;
			else
				return DRM_FORMAT_XBGR8888;
		} else {
			if (alpha)
				return DRM_FORMAT_ARGB8888;
			else
				return DRM_FORMAT_XRGB8888;
		}
	case PLANE_CTL_FORMAT_XRGB_2101010:
		if (rgb_order)
			return DRM_FORMAT_XBGR2101010;
		else
			return DRM_FORMAT_XRGB2101010;
	case PLANE_CTL_FORMAT_XRGB_16161616F:
		if (rgb_order) {
			if (alpha)
				return DRM_FORMAT_ABGR16161616F;
			else
				return DRM_FORMAT_XBGR16161616F;
		} else {
			if (alpha)
				return DRM_FORMAT_ARGB16161616F;
			else
				return DRM_FORMAT_XRGB16161616F;
		}
	}
}
3032
/*
 * Try to wrap the BIOS/GOP-programmed scanout buffer (described by
 * @plane_config) in a GEM object backed by the preallocated stolen
 * memory range and initialize an intel framebuffer around it.  Returns
 * true on success.
 */
static bool
intel_alloc_initial_plane_obj(struct intel_crtc *crtc,
			      struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_mode_fb_cmd2 mode_cmd = { 0 };
	struct drm_framebuffer *fb = &plane_config->fb->base;
	u32 base_aligned = round_down(plane_config->base, PAGE_SIZE);
	u32 size_aligned = round_up(plane_config->base + plane_config->size,
				    PAGE_SIZE);
	struct drm_i915_gem_object *obj;
	bool ret = false;

	size_aligned -= base_aligned;

	if (plane_config->size == 0)
		return false;

	/*
	 * Don't take over a buffer that would eat more than half of the
	 * usable stolen space.
	 */
	if (size_aligned * 2 > dev_priv->stolen_usable_size)
		return false;

	/* only these modifiers can be represented by an initial config */
	switch (fb->modifier) {
	case DRM_FORMAT_MOD_LINEAR:
	case I915_FORMAT_MOD_X_TILED:
	case I915_FORMAT_MOD_Y_TILED:
		break;
	default:
		DRM_DEBUG_DRIVER("Unsupported modifier for initial FB: 0x%llx\n",
				 fb->modifier);
		return false;
	}

	mutex_lock(&dev->struct_mutex);
	obj = i915_gem_object_create_stolen_for_preallocated(dev_priv,
							     base_aligned,
							     base_aligned,
							     size_aligned);
	mutex_unlock(&dev->struct_mutex);
	if (!obj)
		return false;

	/* carry the BIOS-programmed tiling/stride over to the object */
	switch (plane_config->tiling) {
	case I915_TILING_NONE:
		break;
	case I915_TILING_X:
	case I915_TILING_Y:
		obj->tiling_and_stride = fb->pitches[0] | plane_config->tiling;
		break;
	default:
		MISSING_CASE(plane_config->tiling);
		goto out;
	}

	mode_cmd.pixel_format = fb->format->format;
	mode_cmd.width = fb->width;
	mode_cmd.height = fb->height;
	mode_cmd.pitches[0] = fb->pitches[0];
	mode_cmd.modifier[0] = fb->modifier;
	mode_cmd.flags = DRM_MODE_FB_MODIFIERS;

	if (intel_framebuffer_init(to_intel_framebuffer(fb), obj, &mode_cmd)) {
		DRM_DEBUG_KMS("intel fb init failed\n");
		goto out;
	}

	DRM_DEBUG_KMS("initial plane fb obj %p\n", obj);
	ret = true;
out:
	/* the fb (on success) holds its own reference to the object */
	i915_gem_object_put(obj);
	return ret;
}
3109
3110 static void
3111 intel_set_plane_visible(struct intel_crtc_state *crtc_state,
3112 struct intel_plane_state *plane_state,
3113 bool visible)
3114 {
3115 struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
3116
3117 plane_state->base.visible = visible;
3118
3119 if (visible)
3120 crtc_state->base.plane_mask |= drm_plane_mask(&plane->base);
3121 else
3122 crtc_state->base.plane_mask &= ~drm_plane_mask(&plane->base);
3123 }
3124
/*
 * Rebuild crtc_state->active_planes from the crtc's plane_mask.  The
 * plane_mask uses unique per-plane bits, so it is a reliable source
 * even when the existing active_planes value may be stale/aliased
 * (e.g. state taken over from the BIOS — NOTE(review): confirm).
 */
static void fixup_active_planes(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
	struct drm_plane *plane;

	crtc_state->active_planes = 0;

	drm_for_each_plane_mask(plane, &dev_priv->drm,
				crtc_state->base.plane_mask)
		crtc_state->active_planes |= BIT(to_intel_plane(plane)->id);
}
3141
/*
 * Disable a plane outside of the atomic commit machinery: update the
 * software state (visibility, active planes, data rate) and then turn
 * the hardware plane off.  Used for sanitizing inherited/broken state.
 */
static void intel_plane_disable_noatomic(struct intel_crtc *crtc,
					 struct intel_plane *plane)
{
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(crtc->base.state);
	struct intel_plane_state *plane_state =
		to_intel_plane_state(plane->base.state);

	DRM_DEBUG_KMS("Disabling [PLANE:%d:%s] on [CRTC:%d:%s]\n",
		      plane->base.base.id, plane->base.name,
		      crtc->base.base.id, crtc->base.name);

	intel_set_plane_visible(crtc_state, plane_state, false);
	fixup_active_planes(crtc_state);
	crtc_state->data_rate[plane->id] = 0;

	/* losing the primary plane needs extra pre-disable work */
	if (plane->id == PLANE_PRIMARY)
		intel_pre_disable_primary_noatomic(&crtc->base);

	intel_disable_plane(plane, crtc_state);
}
3163
3164 static struct intel_frontbuffer *
3165 to_intel_frontbuffer(struct drm_framebuffer *fb)
3166 {
3167 return fb ? to_intel_framebuffer(fb)->frontbuffer : NULL;
3168 }
3169
/*
 * Take over the framebuffer the firmware left scanned out on this crtc's
 * primary plane, so the boot image survives until the first real modeset.
 * Either wraps the firmware fb in our own object, shares another crtc's
 * fb at the same GGTT address, or gives up and disables the plane.
 */
static void
intel_find_initial_plane_obj(struct intel_crtc *intel_crtc,
			     struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = intel_crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_crtc *c;
	struct drm_plane *primary = intel_crtc->base.primary;
	struct drm_plane_state *plane_state = primary->state;
	struct intel_plane *intel_plane = to_intel_plane(primary);
	struct intel_plane_state *intel_state =
		to_intel_plane_state(plane_state);
	struct drm_framebuffer *fb;

	if (!plane_config->fb)
		return;

	if (intel_alloc_initial_plane_obj(intel_crtc, plane_config)) {
		fb = &plane_config->fb->base;
		goto valid_fb;
	}

	kfree(plane_config->fb);

	/*
	 * Failed to wrap the firmware fb; see if another active crtc is
	 * already scanning out from the same GGTT address and share its fb.
	 */
	for_each_crtc(dev, c) {
		struct intel_plane_state *state;

		if (c == &intel_crtc->base)
			continue;

		if (!to_intel_crtc(c)->active)
			continue;

		state = to_intel_plane_state(c->primary->state);
		if (!state->vma)
			continue;

		if (intel_plane_ggtt_offset(state) == plane_config->base) {
			fb = state->base.fb;
			drm_framebuffer_get(fb);
			goto valid_fb;
		}
	}

	/*
	 * We failed to reconstruct the firmware fb.  The hw state says the
	 * primary plane is visible but it has no fb, which would confuse
	 * later code, so just disable the plane now and let userspace fix
	 * things up with a real modeset.
	 */
	intel_plane_disable_noatomic(intel_crtc, intel_plane);

	return;

valid_fb:
	intel_state->base.rotation = plane_config->rotation;
	intel_fill_fb_ggtt_view(&intel_state->view, fb,
				intel_state->base.rotation);
	intel_state->color_plane[0].stride =
		intel_fb_pitch(fb, 0, intel_state->base.rotation);

	mutex_lock(&dev->struct_mutex);
	intel_state->vma =
		intel_pin_and_fence_fb_obj(fb,
					   &intel_state->view,
					   intel_plane_uses_fence(intel_state),
					   &intel_state->flags);
	mutex_unlock(&dev->struct_mutex);
	if (IS_ERR(intel_state->vma)) {
		DRM_ERROR("failed to pin boot fb on pipe %d: %li\n",
			  intel_crtc->pipe, PTR_ERR(intel_state->vma));

		intel_state->vma = NULL;
		drm_framebuffer_put(fb);
		return;
	}

	intel_frontbuffer_flush(to_intel_frontbuffer(fb), ORIGIN_DIRTYFB);

	/* Scan out the full fb, unscaled, anchored at the crtc origin. */
	plane_state->src_x = 0;
	plane_state->src_y = 0;
	plane_state->src_w = fb->width << 16;
	plane_state->src_h = fb->height << 16;

	plane_state->crtc_x = 0;
	plane_state->crtc_y = 0;
	plane_state->crtc_w = fb->width;
	plane_state->crtc_h = fb->height;

	intel_state->base.src = drm_plane_state_src(plane_state);
	intel_state->base.dst = drm_plane_state_dest(plane_state);

	/* Keep whatever swizzle the firmware set up for tiled boot fbs. */
	if (plane_config->tiling)
		dev_priv->preserve_bios_swizzle = true;

	plane_state->fb = fb;
	plane_state->crtc = &intel_crtc->base;

	atomic_or(to_intel_plane(primary)->frontbuffer_bit,
		  &to_intel_frontbuffer(fb)->bits);
}
3276
3277 static int skl_max_plane_width(const struct drm_framebuffer *fb,
3278 int color_plane,
3279 unsigned int rotation)
3280 {
3281 int cpp = fb->format->cpp[color_plane];
3282
3283 switch (fb->modifier) {
3284 case DRM_FORMAT_MOD_LINEAR:
3285 case I915_FORMAT_MOD_X_TILED:
3286
3287
3288
3289
3290
3291
3292
3293
3294
3295
3296 if (cpp == 8)
3297 return 4096;
3298 else
3299 return 5120;
3300 case I915_FORMAT_MOD_Y_TILED_CCS:
3301 case I915_FORMAT_MOD_Yf_TILED_CCS:
3302
3303 case I915_FORMAT_MOD_Y_TILED:
3304 case I915_FORMAT_MOD_Yf_TILED:
3305 if (cpp == 8)
3306 return 2048;
3307 else
3308 return 4096;
3309 default:
3310 MISSING_CASE(fb->modifier);
3311 return 2048;
3312 }
3313 }
3314
3315 static int glk_max_plane_width(const struct drm_framebuffer *fb,
3316 int color_plane,
3317 unsigned int rotation)
3318 {
3319 int cpp = fb->format->cpp[color_plane];
3320
3321 switch (fb->modifier) {
3322 case DRM_FORMAT_MOD_LINEAR:
3323 case I915_FORMAT_MOD_X_TILED:
3324 if (cpp == 8)
3325 return 4096;
3326 else
3327 return 5120;
3328 case I915_FORMAT_MOD_Y_TILED_CCS:
3329 case I915_FORMAT_MOD_Yf_TILED_CCS:
3330
3331 case I915_FORMAT_MOD_Y_TILED:
3332 case I915_FORMAT_MOD_Yf_TILED:
3333 if (cpp == 8)
3334 return 2048;
3335 else
3336 return 5120;
3337 default:
3338 MISSING_CASE(fb->modifier);
3339 return 2048;
3340 }
3341 }
3342
/*
 * Maximum source width for a plane on gen11+: a flat 5120 pixels
 * regardless of tiling, format or rotation (parameters kept for
 * signature parity with the skl/glk variants).
 */
static int icl_max_plane_width(const struct drm_framebuffer *fb,
			       int color_plane,
			       unsigned int rotation)
{
	return 5120;
}
3349
/*
 * For CCS framebuffers the main and AUX surfaces must agree on their x/y
 * offsets.  Walk the AUX surface offset backwards (one alignment step at
 * a time) trying to land on the same x/y as the main surface at
 * (@main_x, @main_y, @main_offset).
 *
 * Returns true and commits the new AUX offset/x/y into @plane_state on
 * success, false if no matching position could be found.
 */
static bool skl_check_main_ccs_coordinates(struct intel_plane_state *plane_state,
					   int main_x, int main_y, u32 main_offset)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int hsub = fb->format->hsub;
	int vsub = fb->format->vsub;
	int aux_x = plane_state->color_plane[1].x;
	int aux_y = plane_state->color_plane[1].y;
	u32 aux_offset = plane_state->color_plane[1].offset;
	u32 alignment = intel_surf_alignment(fb, 1);

	/* Never move the AUX surface below the main surface. */
	while (aux_offset >= main_offset && aux_y <= main_y) {
		int x, y;

		if (aux_x == main_x && aux_y == main_y)
			break;

		if (aux_offset == 0)
			break;

		/* AUX coordinates are in subsampled units. */
		x = aux_x / hsub;
		y = aux_y / vsub;
		aux_offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 1,
							       aux_offset, aux_offset - alignment);
		/* Convert back, preserving the sub-sample remainder. */
		aux_x = x * hsub + aux_x % hsub;
		aux_y = y * vsub + aux_y % vsub;
	}

	if (aux_x != main_x || aux_y != main_y)
		return false;

	plane_state->color_plane[1].offset = aux_offset;
	plane_state->color_plane[1].x = aux_x;
	plane_state->color_plane[1].y = aux_y;

	return true;
}
3387
/*
 * Validate and finalize the main (Y/RGB) surface offset and x/y for a
 * skl+ plane: enforce the per-platform max source size, compute an
 * aligned surface offset, and massage it to satisfy the X-tile stride
 * and CCS main/AUX coordination constraints.
 *
 * Returns 0 on success, -EINVAL if no acceptable offset exists.
 */
static int skl_check_main_surface(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane_state->base.plane->dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	unsigned int rotation = plane_state->base.rotation;
	int x = plane_state->base.src.x1 >> 16;
	int y = plane_state->base.src.y1 >> 16;
	int w = drm_rect_width(&plane_state->base.src) >> 16;
	int h = drm_rect_height(&plane_state->base.src) >> 16;
	int max_width;
	int max_height = 4096;
	u32 alignment, offset, aux_offset = plane_state->color_plane[1].offset;

	if (INTEL_GEN(dev_priv) >= 11)
		max_width = icl_max_plane_width(fb, 0, rotation);
	else if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
		max_width = glk_max_plane_width(fb, 0, rotation);
	else
		max_width = skl_max_plane_width(fb, 0, rotation);

	if (w > max_width || h > max_height) {
		DRM_DEBUG_KMS("requested Y/RGB source size %dx%d too big (limit %dx%d)\n",
			      w, h, max_width, max_height);
		return -EINVAL;
	}

	intel_add_fb_offsets(&x, &y, plane_state, 0);
	offset = intel_plane_compute_aligned_offset(&x, &y, plane_state, 0);
	alignment = intel_surf_alignment(fb, 0);

	/*
	 * The AUX surface offset is specified relative to the main surface
	 * and must not be negative; pull the main offset down (aligned) so
	 * it does not exceed the AUX offset.
	 */
	if (offset > aux_offset)
		offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
							   offset, aux_offset & ~(alignment - 1));

	/*
	 * With X-tiled surfaces the plane cannot cope with
	 * x offset + width exceeding the stride, so keep stepping the
	 * offset back (which grows x) until that constraint holds or we
	 * run out of room.
	 */
	if (fb->modifier == I915_FORMAT_MOD_X_TILED) {
		int cpp = fb->format->cpp[0];

		while ((x + w) * cpp > plane_state->color_plane[0].stride) {
			if (offset == 0) {
				DRM_DEBUG_KMS("Unable to find suitable display surface offset due to X-tiling\n");
				return -EINVAL;
			}

			offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
								   offset, offset - alignment);
		}
	}

	/*
	 * CCS AUX surfaces must share the main surface's x/y; keep walking
	 * the main offset back until skl_check_main_ccs_coordinates() can
	 * line the two up.
	 */
	if (is_ccs_modifier(fb->modifier)) {
		while (!skl_check_main_ccs_coordinates(plane_state, x, y, offset)) {
			if (offset == 0)
				break;

			offset = intel_plane_adjust_aligned_offset(&x, &y, plane_state, 0,
								   offset, offset - alignment);
		}

		if (x != plane_state->color_plane[1].x || y != plane_state->color_plane[1].y) {
			DRM_DEBUG_KMS("Unable to find suitable display surface offset due to CCS\n");
			return -EINVAL;
		}
	}

	plane_state->color_plane[0].offset = offset;
	plane_state->color_plane[0].x = x;
	plane_state->color_plane[0].y = y;

	/*
	 * Put the final x/y back into the src rect so later coordinate
	 * checks see the adjusted values.
	 */
	drm_rect_translate(&plane_state->base.src,
			   (x << 16) - plane_state->base.src.x1,
			   (y << 16) - plane_state->base.src.y1);

	return 0;
}
3480
/*
 * Compute the offset/x/y of the chroma (CbCr) surface of a planar YUV fb.
 *
 * The >> 17 shifts convert the 16.16 fixed-point src coordinates to
 * chroma-plane units — i.e. an extra divide by two, which assumes 2x2
 * subsampled (NV12-style) formats.  NOTE(review): verify callers never
 * pass other subsampling ratios here.
 */
static int skl_check_nv12_aux_surface(struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	unsigned int rotation = plane_state->base.rotation;
	int max_width = skl_max_plane_width(fb, 1, rotation);
	int max_height = 4096;
	int x = plane_state->base.src.x1 >> 17;
	int y = plane_state->base.src.y1 >> 17;
	int w = drm_rect_width(&plane_state->base.src) >> 17;
	int h = drm_rect_height(&plane_state->base.src) >> 17;
	u32 offset;

	intel_add_fb_offsets(&x, &y, plane_state, 1);
	offset = intel_plane_compute_aligned_offset(&x, &y, plane_state, 1);

	/* The chroma plane has its own (smaller) source size limits. */
	if (w > max_width || h > max_height) {
		DRM_DEBUG_KMS("CbCr source size %dx%d too big (limit %dx%d)\n",
			      w, h, max_width, max_height);
		return -EINVAL;
	}

	plane_state->color_plane[1].offset = offset;
	plane_state->color_plane[1].x = x;
	plane_state->color_plane[1].y = y;

	return 0;
}
3509
/*
 * Compute the offset/x/y of the CCS AUX surface.  The AUX plane is
 * subsampled by the format's hsub/vsub factors; the final x/y keep the
 * sub-sample remainder so they stay in main-surface pixel units.
 */
static int skl_check_ccs_aux_surface(struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int src_x = plane_state->base.src.x1 >> 16;
	int src_y = plane_state->base.src.y1 >> 16;
	int hsub = fb->format->hsub;
	int vsub = fb->format->vsub;
	int x = src_x / hsub;
	int y = src_y / vsub;
	u32 offset;

	intel_add_fb_offsets(&x, &y, plane_state, 1);
	offset = intel_plane_compute_aligned_offset(&x, &y, plane_state, 1);

	plane_state->color_plane[1].offset = offset;
	plane_state->color_plane[1].x = x * hsub + src_x % hsub;
	plane_state->color_plane[1].y = y * vsub + src_y % vsub;

	return 0;
}
3530
/*
 * Top-level surface check for skl+ planes: compute the GTT view, then
 * fill in the AUX surface (planar chroma or CCS) followed by the main
 * surface.  The AUX surface must be handled first because the main
 * surface adjustment in skl_check_main_surface() depends on the AUX
 * offset.
 *
 * Returns 0 on success or a negative error code.
 */
int skl_check_plane_surface(struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int ret;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	/* Nothing more to compute for an invisible plane. */
	if (!plane_state->base.visible)
		return 0;

	/*
	 * Handle the AUX surface first since the main surface setup
	 * depends on it.
	 */
	if (is_planar_yuv_format(fb->format->format)) {
		ret = skl_check_nv12_aux_surface(plane_state);
		if (ret)
			return ret;
	} else if (is_ccs_modifier(fb->modifier)) {
		ret = skl_check_ccs_aux_surface(plane_state);
		if (ret)
			return ret;
	} else {
		/*
		 * No AUX surface: park the AUX offset way up high so the
		 * "offset > aux_offset" clamp in skl_check_main_surface()
		 * never constrains the main surface.
		 */
		plane_state->color_plane[1].offset = ~0xfff;
		plane_state->color_plane[1].x = 0;
		plane_state->color_plane[1].y = 0;
	}

	ret = skl_check_main_surface(plane_state);
	if (ret)
		return ret;

	return 0;
}
3567
3568 unsigned int
3569 i9xx_plane_max_stride(struct intel_plane *plane,
3570 u32 pixel_format, u64 modifier,
3571 unsigned int rotation)
3572 {
3573 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
3574
3575 if (!HAS_GMCH(dev_priv)) {
3576 return 32*1024;
3577 } else if (INTEL_GEN(dev_priv) >= 4) {
3578 if (modifier == I915_FORMAT_MOD_X_TILED)
3579 return 16*1024;
3580 else
3581 return 32*1024;
3582 } else if (INTEL_GEN(dev_priv) >= 3) {
3583 if (modifier == I915_FORMAT_MOD_X_TILED)
3584 return 8*1024;
3585 else
3586 return 16*1024;
3587 } else {
3588 if (plane->i9xx_plane == PLANE_C)
3589 return 4*1024;
3590 else
3591 return 8*1024;
3592 }
3593 }
3594
3595 static u32 i9xx_plane_ctl_crtc(const struct intel_crtc_state *crtc_state)
3596 {
3597 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
3598 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
3599 u32 dspcntr = 0;
3600
3601 if (crtc_state->gamma_enable)
3602 dspcntr |= DISPPLANE_GAMMA_ENABLE;
3603
3604 if (crtc_state->csc_enable)
3605 dspcntr |= DISPPLANE_PIPE_CSC_ENABLE;
3606
3607 if (INTEL_GEN(dev_priv) < 5)
3608 dspcntr |= DISPPLANE_SEL_PIPE(crtc->pipe);
3609
3610 return dspcntr;
3611 }
3612
/*
 * Build the plane-state-derived part of the DSPCNTR value for a pre-skl
 * primary plane: enable bit, pixel format, tiling and rotation/mirror.
 * The crtc-derived bits come from i9xx_plane_ctl_crtc().
 *
 * Returns 0 (plane effectively not enabled) on an unexpected format.
 */
static u32 i9xx_plane_ctl(const struct intel_crtc_state *crtc_state,
			  const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	unsigned int rotation = plane_state->base.rotation;
	u32 dspcntr;

	dspcntr = DISPLAY_PLANE_ENABLE;

	/* Trickle feed is disabled on these specific platforms only. */
	if (IS_G4X(dev_priv) || IS_GEN(dev_priv, 5) ||
	    IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
		dspcntr |= DISPPLANE_TRICKLE_FEED_DISABLE;

	/* Map the drm fourcc to the hw pixel format field. */
	switch (fb->format->format) {
	case DRM_FORMAT_C8:
		dspcntr |= DISPPLANE_8BPP;
		break;
	case DRM_FORMAT_XRGB1555:
		dspcntr |= DISPPLANE_BGRX555;
		break;
	case DRM_FORMAT_RGB565:
		dspcntr |= DISPPLANE_BGRX565;
		break;
	case DRM_FORMAT_XRGB8888:
		dspcntr |= DISPPLANE_BGRX888;
		break;
	case DRM_FORMAT_XBGR8888:
		dspcntr |= DISPPLANE_RGBX888;
		break;
	case DRM_FORMAT_XRGB2101010:
		dspcntr |= DISPPLANE_BGRX101010;
		break;
	case DRM_FORMAT_XBGR2101010:
		dspcntr |= DISPPLANE_RGBX101010;
		break;
	default:
		/* Format should have been rejected earlier in .atomic_check. */
		MISSING_CASE(fb->format->format);
		return 0;
	}

	/* X-tiling bit exists on gen4+ only. */
	if (INTEL_GEN(dev_priv) >= 4 &&
	    fb->modifier == I915_FORMAT_MOD_X_TILED)
		dspcntr |= DISPPLANE_TILED;

	if (rotation & DRM_MODE_ROTATE_180)
		dspcntr |= DISPPLANE_ROTATE_180;

	if (rotation & DRM_MODE_REFLECT_X)
		dspcntr |= DISPPLANE_MIRROR;

	return dspcntr;
}
3667
/*
 * Compute the surface offset and final x/y for a pre-skl primary plane.
 * On gen4+ the surface address is realigned (adjusting x/y); pre-gen4
 * has no surface offset register so offset stays 0.  On rotating or
 * mirroring platforms the x/y are moved to the scanout start corner —
 * skipped on HSW/BDW, which presumably handle this in hardware
 * (NOTE(review): confirm).
 *
 * Returns 0 on success or a negative error code from the GTT setup.
 */
int i9xx_check_plane_surface(struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	int src_x, src_y;
	u32 offset;
	int ret;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	if (!plane_state->base.visible)
		return 0;

	src_x = plane_state->base.src.x1 >> 16;
	src_y = plane_state->base.src.y1 >> 16;

	intel_add_fb_offsets(&src_x, &src_y, plane_state, 0);

	if (INTEL_GEN(dev_priv) >= 4)
		offset = intel_plane_compute_aligned_offset(&src_x, &src_y,
							    plane_state, 0);
	else
		offset = 0;

	/*
	 * Put the final x/y back into the src rect so later coordinate
	 * checks see the adjusted values.
	 */
	drm_rect_translate(&plane_state->base.src,
			   (src_x << 16) - plane_state->base.src.x1,
			   (src_y << 16) - plane_state->base.src.y1);

	/* Move x/y to the corner the hw starts scanning from. */
	if (!IS_HASWELL(dev_priv) && !IS_BROADWELL(dev_priv)) {
		unsigned int rotation = plane_state->base.rotation;
		int src_w = drm_rect_width(&plane_state->base.src) >> 16;
		int src_h = drm_rect_height(&plane_state->base.src) >> 16;

		if (rotation & DRM_MODE_ROTATE_180) {
			src_x += src_w - 1;
			src_y += src_h - 1;
		} else if (rotation & DRM_MODE_REFLECT_X) {
			src_x += src_w - 1;
		}
	}

	plane_state->color_plane[0].offset = offset;
	plane_state->color_plane[0].x = src_x;
	plane_state->color_plane[0].y = src_y;

	return 0;
}
3722
3723 static bool i9xx_plane_has_windowing(struct intel_plane *plane)
3724 {
3725 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
3726 enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
3727
3728 if (IS_CHERRYVIEW(dev_priv))
3729 return i9xx_plane == PLANE_B;
3730 else if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
3731 return false;
3732 else if (IS_GEN(dev_priv, 4))
3733 return i9xx_plane == PLANE_C;
3734 else
3735 return i9xx_plane == PLANE_B ||
3736 i9xx_plane == PLANE_C;
3737 }
3738
/*
 * .atomic_check for pre-skl primary planes: validate rotation and
 * clipping (no scaling supported), compute the surface layout, and
 * precompute the DSPCNTR value for the commit phase.
 *
 * Returns 0 on success or a negative error code.
 */
static int
i9xx_plane_check(struct intel_crtc_state *crtc_state,
		 struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	int ret;

	ret = chv_plane_check_rotation(plane_state);
	if (ret)
		return ret;

	/* These planes cannot scale; windowing depends on the plane. */
	ret = drm_atomic_helper_check_plane_state(&plane_state->base,
						  &crtc_state->base,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  i9xx_plane_has_windowing(plane),
						  true);
	if (ret)
		return ret;

	ret = i9xx_check_plane_surface(plane_state);
	if (ret)
		return ret;

	/* Fully clipped plane: nothing further to validate or program. */
	if (!plane_state->base.visible)
		return 0;

	ret = intel_plane_check_src_coordinates(plane_state);
	if (ret)
		return ret;

	plane_state->ctl = i9xx_plane_ctl(crtc_state, plane_state);

	return 0;
}
3774
/*
 * Program a pre-skl primary plane from the precomputed plane state.
 * All register writes happen under the uncore lock with _FW accessors;
 * the surface address write is last so the other registers latch
 * atomically with it on gen4+.
 */
static void i9xx_update_plane(struct intel_plane *plane,
			      const struct intel_crtc_state *crtc_state,
			      const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	u32 linear_offset;
	int x = plane_state->color_plane[0].x;
	int y = plane_state->color_plane[0].y;
	int crtc_x = plane_state->base.dst.x1;
	int crtc_y = plane_state->base.dst.y1;
	int crtc_w = drm_rect_width(&plane_state->base.dst);
	int crtc_h = drm_rect_height(&plane_state->base.dst);
	unsigned long irqflags;
	u32 dspaddr_offset;
	u32 dspcntr;

	dspcntr = plane_state->ctl | i9xx_plane_ctl_crtc(crtc_state);

	linear_offset = intel_fb_xy_to_linear(x, y, plane_state, 0);

	/* Pre-gen4 has no surface offset register; fold it into the address. */
	if (INTEL_GEN(dev_priv) >= 4)
		dspaddr_offset = plane_state->color_plane[0].offset;
	else
		dspaddr_offset = linear_offset;

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	I915_WRITE_FW(DSPSTRIDE(i9xx_plane), plane_state->color_plane[0].stride);

	if (INTEL_GEN(dev_priv) < 4) {
		/*
		 * Pre-gen4 planes have a position/size window generator
		 * (sizes are programmed minus one).
		 */
		I915_WRITE_FW(DSPPOS(i9xx_plane), (crtc_y << 16) | crtc_x);
		I915_WRITE_FW(DSPSIZE(i9xx_plane),
			      ((crtc_h - 1) << 16) | (crtc_w - 1));
	} else if (IS_CHERRYVIEW(dev_priv) && i9xx_plane == PLANE_B) {
		/* CHV pipe B primary keeps a window generator of its own. */
		I915_WRITE_FW(PRIMPOS(i9xx_plane), (crtc_y << 16) | crtc_x);
		I915_WRITE_FW(PRIMSIZE(i9xx_plane),
			      ((crtc_h - 1) << 16) | (crtc_w - 1));
		I915_WRITE_FW(PRIMCNSTALPHA(i9xx_plane), 0);
	}

	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		I915_WRITE_FW(DSPOFFSET(i9xx_plane), (y << 16) | x);
	} else if (INTEL_GEN(dev_priv) >= 4) {
		I915_WRITE_FW(DSPLINOFF(i9xx_plane), linear_offset);
		I915_WRITE_FW(DSPTILEOFF(i9xx_plane), (y << 16) | x);
	}

	/*
	 * Write DSPCNTR just before the surface address register: the
	 * surface write arms the update, so everything above latches
	 * together with it.
	 */
	I915_WRITE_FW(DSPCNTR(i9xx_plane), dspcntr);
	if (INTEL_GEN(dev_priv) >= 4)
		I915_WRITE_FW(DSPSURF(i9xx_plane),
			      intel_plane_ggtt_offset(plane_state) +
			      dspaddr_offset);
	else
		I915_WRITE_FW(DSPADDR(i9xx_plane),
			      intel_plane_ggtt_offset(plane_state) +
			      dspaddr_offset);

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
3845
/*
 * Disable a pre-skl primary plane.  DSPCNTR is not simply cleared:
 * the crtc-derived bits (pipe gamma/csc, pipe select) are kept
 * programmed even while the plane is off, so crtc state readout and
 * pipe-level color handling stay consistent.
 */
static void i9xx_disable_plane(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	unsigned long irqflags;
	u32 dspcntr;

	/*
	 * Only the crtc-derived bits survive a disable; the plane enable
	 * bit and all plane-state-derived bits are dropped.
	 */
	dspcntr = i9xx_plane_ctl_crtc(crtc_state);

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	I915_WRITE_FW(DSPCNTR(i9xx_plane), dspcntr);
	/* Writing the surface/address register arms the disable. */
	if (INTEL_GEN(dev_priv) >= 4)
		I915_WRITE_FW(DSPSURF(i9xx_plane), 0);
	else
		I915_WRITE_FW(DSPADDR(i9xx_plane), 0);

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
3876
/*
 * Read back whether a pre-skl primary plane is enabled in hardware, and
 * which pipe it is attached to (*@pipe).  Used during state readout.
 *
 * Returns false (plane treated as off) when the pipe's power domain is
 * not energized, since the registers can't be read then.
 */
static bool i9xx_plane_get_hw_state(struct intel_plane *plane,
				    enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	intel_wakeref_t wakeref;
	bool ret;
	u32 val;

	/*
	 * Using the plane's own pipe for the power domain is not strictly
	 * correct for planes that can move between pipes, but those exist
	 * only on old platforms — presumably without per-pipe power wells
	 * (NOTE(review): confirm).
	 */
	power_domain = POWER_DOMAIN_PIPE(plane->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	val = I915_READ(DSPCNTR(i9xx_plane));

	ret = val & DISPLAY_PLANE_ENABLE;

	/* Pre-ilk hw reports the attached pipe in DSPCNTR itself. */
	if (INTEL_GEN(dev_priv) >= 5)
		*pipe = plane->pipe;
	else
		*pipe = (val & DISPPLANE_SEL_PIPE_MASK) >>
			DISPPLANE_SEL_PIPE_SHIFT;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
3911
3912 static void skl_detach_scaler(struct intel_crtc *intel_crtc, int id)
3913 {
3914 struct drm_device *dev = intel_crtc->base.dev;
3915 struct drm_i915_private *dev_priv = to_i915(dev);
3916
3917 I915_WRITE(SKL_PS_CTRL(intel_crtc->pipe, id), 0);
3918 I915_WRITE(SKL_PS_WIN_POS(intel_crtc->pipe, id), 0);
3919 I915_WRITE(SKL_PS_WIN_SZ(intel_crtc->pipe, id), 0);
3920 }
3921
3922
3923
3924
3925 static void skl_detach_scalers(const struct intel_crtc_state *crtc_state)
3926 {
3927 struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->base.crtc);
3928 const struct intel_crtc_scaler_state *scaler_state =
3929 &crtc_state->scaler_state;
3930 int i;
3931
3932
3933 for (i = 0; i < intel_crtc->num_scalers; i++) {
3934 if (!scaler_state->scalers[i].in_use)
3935 skl_detach_scaler(intel_crtc, i);
3936 }
3937 }
3938
3939 static unsigned int skl_plane_stride_mult(const struct drm_framebuffer *fb,
3940 int color_plane, unsigned int rotation)
3941 {
3942
3943
3944
3945
3946 if (fb->modifier == DRM_FORMAT_MOD_LINEAR)
3947 return 64;
3948 else if (drm_rotation_90_or_270(rotation))
3949 return intel_tile_height(fb, color_plane);
3950 else
3951 return intel_tile_width_bytes(fb, color_plane);
3952 }
3953
3954 u32 skl_plane_stride(const struct intel_plane_state *plane_state,
3955 int color_plane)
3956 {
3957 const struct drm_framebuffer *fb = plane_state->base.fb;
3958 unsigned int rotation = plane_state->base.rotation;
3959 u32 stride = plane_state->color_plane[color_plane].stride;
3960
3961 if (color_plane >= fb->format->num_planes)
3962 return 0;
3963
3964 return stride / skl_plane_stride_mult(fb, color_plane, rotation);
3965 }
3966
/*
 * Translate a drm fourcc into the skl+ PLANE_CTL format (and, for RGB
 * formats, channel order) bits.  Returns 0 for unexpected formats,
 * which should have been rejected during atomic check.
 */
static u32 skl_plane_ctl_format(u32 pixel_format)
{
	switch (pixel_format) {
	case DRM_FORMAT_C8:
		return PLANE_CTL_FORMAT_INDEXED;
	case DRM_FORMAT_RGB565:
		return PLANE_CTL_FORMAT_RGB_565;
	/* The hw ignores the alpha channel of the X variants. */
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_ABGR8888:
		return PLANE_CTL_FORMAT_XRGB_8888 | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
		return PLANE_CTL_FORMAT_XRGB_8888;
	case DRM_FORMAT_XBGR2101010:
		return PLANE_CTL_FORMAT_XRGB_2101010 | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB2101010:
		return PLANE_CTL_FORMAT_XRGB_2101010;
	case DRM_FORMAT_XBGR16161616F:
	case DRM_FORMAT_ABGR16161616F:
		return PLANE_CTL_FORMAT_XRGB_16161616F | PLANE_CTL_ORDER_RGBX;
	case DRM_FORMAT_XRGB16161616F:
	case DRM_FORMAT_ARGB16161616F:
		return PLANE_CTL_FORMAT_XRGB_16161616F;
	/* Packed YUV 4:2:2 variants. */
	case DRM_FORMAT_YUYV:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_YUYV;
	case DRM_FORMAT_YVYU:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_YVYU;
	case DRM_FORMAT_UYVY:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_UYVY;
	case DRM_FORMAT_VYUY:
		return PLANE_CTL_FORMAT_YUV422 | PLANE_CTL_YUV422_VYUY;
	/* Planar YUV formats. */
	case DRM_FORMAT_NV12:
		return PLANE_CTL_FORMAT_NV12;
	case DRM_FORMAT_P010:
		return PLANE_CTL_FORMAT_P010;
	case DRM_FORMAT_P012:
		return PLANE_CTL_FORMAT_P012;
	case DRM_FORMAT_P016:
		return PLANE_CTL_FORMAT_P016;
	case DRM_FORMAT_Y210:
		return PLANE_CTL_FORMAT_Y210;
	case DRM_FORMAT_Y212:
		return PLANE_CTL_FORMAT_Y212;
	case DRM_FORMAT_Y216:
		return PLANE_CTL_FORMAT_Y216;
	case DRM_FORMAT_XVYU2101010:
		return PLANE_CTL_FORMAT_Y410;
	case DRM_FORMAT_XVYU12_16161616:
		return PLANE_CTL_FORMAT_Y412;
	case DRM_FORMAT_XVYU16161616:
		return PLANE_CTL_FORMAT_Y416;
	default:
		MISSING_CASE(pixel_format);
	}

	return 0;
}
4024
4025 static u32 skl_plane_ctl_alpha(const struct intel_plane_state *plane_state)
4026 {
4027 if (!plane_state->base.fb->format->has_alpha)
4028 return PLANE_CTL_ALPHA_DISABLE;
4029
4030 switch (plane_state->base.pixel_blend_mode) {
4031 case DRM_MODE_BLEND_PIXEL_NONE:
4032 return PLANE_CTL_ALPHA_DISABLE;
4033 case DRM_MODE_BLEND_PREMULTI:
4034 return PLANE_CTL_ALPHA_SW_PREMULTIPLY;
4035 case DRM_MODE_BLEND_COVERAGE:
4036 return PLANE_CTL_ALPHA_HW_PREMULTIPLY;
4037 default:
4038 MISSING_CASE(plane_state->base.pixel_blend_mode);
4039 return PLANE_CTL_ALPHA_DISABLE;
4040 }
4041 }
4042
4043 static u32 glk_plane_color_ctl_alpha(const struct intel_plane_state *plane_state)
4044 {
4045 if (!plane_state->base.fb->format->has_alpha)
4046 return PLANE_COLOR_ALPHA_DISABLE;
4047
4048 switch (plane_state->base.pixel_blend_mode) {
4049 case DRM_MODE_BLEND_PIXEL_NONE:
4050 return PLANE_COLOR_ALPHA_DISABLE;
4051 case DRM_MODE_BLEND_PREMULTI:
4052 return PLANE_COLOR_ALPHA_SW_PREMULTIPLY;
4053 case DRM_MODE_BLEND_COVERAGE:
4054 return PLANE_COLOR_ALPHA_HW_PREMULTIPLY;
4055 default:
4056 MISSING_CASE(plane_state->base.pixel_blend_mode);
4057 return PLANE_COLOR_ALPHA_DISABLE;
4058 }
4059 }
4060
/*
 * Translate an fb modifier into the skl+ PLANE_CTL tiling bits; the CCS
 * modifiers additionally set render decompression.  Linear (and any
 * unexpected modifier) maps to 0.
 */
static u32 skl_plane_ctl_tiling(u64 fb_modifier)
{
	switch (fb_modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		break;
	case I915_FORMAT_MOD_X_TILED:
		return PLANE_CTL_TILED_X;
	case I915_FORMAT_MOD_Y_TILED:
		return PLANE_CTL_TILED_Y;
	case I915_FORMAT_MOD_Y_TILED_CCS:
		return PLANE_CTL_TILED_Y | PLANE_CTL_RENDER_DECOMPRESSION_ENABLE;
	case I915_FORMAT_MOD_Yf_TILED:
		return PLANE_CTL_TILED_YF;
	case I915_FORMAT_MOD_Yf_TILED_CCS:
		return PLANE_CTL_TILED_YF | PLANE_CTL_RENDER_DECOMPRESSION_ENABLE;
	default:
		MISSING_CASE(fb_modifier);
	}

	return 0;
}
4082
/*
 * Translate a DRM rotation into the skl+ PLANE_CTL rotation bits.
 *
 * Note 90 and 270 are deliberately swapped: the hardware appears to
 * count rotation in the opposite direction from the DRM convention
 * (DRM_MODE_ROTATE_* is counter-clockwise).
 */
static u32 skl_plane_ctl_rotate(unsigned int rotate)
{
	switch (rotate) {
	case DRM_MODE_ROTATE_0:
		break;
	case DRM_MODE_ROTATE_90:
		return PLANE_CTL_ROTATE_270;
	case DRM_MODE_ROTATE_180:
		return PLANE_CTL_ROTATE_180;
	case DRM_MODE_ROTATE_270:
		return PLANE_CTL_ROTATE_90;
	default:
		MISSING_CASE(rotate);
	}

	return 0;
}
4104
4105 static u32 cnl_plane_ctl_flip(unsigned int reflect)
4106 {
4107 switch (reflect) {
4108 case 0:
4109 break;
4110 case DRM_MODE_REFLECT_X:
4111 return PLANE_CTL_FLIP_HORIZONTAL;
4112 case DRM_MODE_REFLECT_Y:
4113 default:
4114 MISSING_CASE(reflect);
4115 }
4116
4117 return 0;
4118 }
4119
4120 u32 skl_plane_ctl_crtc(const struct intel_crtc_state *crtc_state)
4121 {
4122 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
4123 u32 plane_ctl = 0;
4124
4125 if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
4126 return plane_ctl;
4127
4128 if (crtc_state->gamma_enable)
4129 plane_ctl |= PLANE_CTL_PIPE_GAMMA_ENABLE;
4130
4131 if (crtc_state->csc_enable)
4132 plane_ctl |= PLANE_CTL_PIPE_CSC_ENABLE;
4133
4134 return plane_ctl;
4135 }
4136
/*
 * Build the plane-state-derived PLANE_CTL value for a skl+ plane:
 * enable bit, alpha/gamma/YUV handling (pre-glk only), pixel format,
 * tiling, rotation/flip and color keying.  Crtc-derived bits come
 * from skl_plane_ctl_crtc().
 */
u32 skl_plane_ctl(const struct intel_crtc_state *crtc_state,
		  const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	unsigned int rotation = plane_state->base.rotation;
	const struct drm_intel_sprite_colorkey *key = &plane_state->ckey;
	u32 plane_ctl;

	plane_ctl = PLANE_CTL_ENABLE;

	/* glk+ moved these controls into PLANE_COLOR_CTL. */
	if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
		plane_ctl |= skl_plane_ctl_alpha(plane_state);
		plane_ctl |= PLANE_CTL_PLANE_GAMMA_DISABLE;

		if (plane_state->base.color_encoding == DRM_COLOR_YCBCR_BT709)
			plane_ctl |= PLANE_CTL_YUV_TO_RGB_CSC_FORMAT_BT709;

		if (plane_state->base.color_range == DRM_COLOR_YCBCR_FULL_RANGE)
			plane_ctl |= PLANE_CTL_YUV_RANGE_CORRECTION_DISABLE;
	}

	plane_ctl |= skl_plane_ctl_format(fb->format->format);
	plane_ctl |= skl_plane_ctl_tiling(fb->modifier);
	plane_ctl |= skl_plane_ctl_rotate(rotation & DRM_MODE_ROTATE_MASK);

	/* Horizontal flip is a gen10+ feature. */
	if (INTEL_GEN(dev_priv) >= 10)
		plane_ctl |= cnl_plane_ctl_flip(rotation &
						DRM_MODE_REFLECT_MASK);

	if (key->flags & I915_SET_COLORKEY_DESTINATION)
		plane_ctl |= PLANE_CTL_KEY_ENABLE_DESTINATION;
	else if (key->flags & I915_SET_COLORKEY_SOURCE)
		plane_ctl |= PLANE_CTL_KEY_ENABLE_SOURCE;

	return plane_ctl;
}
4175
4176 u32 glk_plane_color_ctl_crtc(const struct intel_crtc_state *crtc_state)
4177 {
4178 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
4179 u32 plane_color_ctl = 0;
4180
4181 if (INTEL_GEN(dev_priv) >= 11)
4182 return plane_color_ctl;
4183
4184 if (crtc_state->gamma_enable)
4185 plane_color_ctl |= PLANE_COLOR_PIPE_GAMMA_ENABLE;
4186
4187 if (crtc_state->csc_enable)
4188 plane_color_ctl |= PLANE_COLOR_PIPE_CSC_ENABLE;
4189
4190 return plane_color_ctl;
4191 }
4192
/*
 * Build the plane-state-derived PLANE_COLOR_CTL value for glk+ planes:
 * plane gamma disable, alpha mode, and the YUV->RGB conversion setup.
 * HDR-capable icl planes use their own input CSC path instead of the
 * fixed-function modes selected here.
 */
u32 glk_plane_color_ctl(const struct intel_crtc_state *crtc_state,
			const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	u32 plane_color_ctl = 0;

	plane_color_ctl |= PLANE_COLOR_PLANE_GAMMA_DISABLE;
	plane_color_ctl |= glk_plane_color_ctl_alpha(plane_state);

	if (fb->format->is_yuv && !icl_is_hdr_plane(dev_priv, plane->id)) {
		/* Pick the fixed-function CSC matching the encoding. */
		if (plane_state->base.color_encoding == DRM_COLOR_YCBCR_BT709)
			plane_color_ctl |= PLANE_COLOR_CSC_MODE_YUV709_TO_RGB709;
		else
			plane_color_ctl |= PLANE_COLOR_CSC_MODE_YUV601_TO_RGB709;

		if (plane_state->base.color_range == DRM_COLOR_YCBCR_FULL_RANGE)
			plane_color_ctl |= PLANE_COLOR_YUV_RANGE_CORRECTION_DISABLE;
	} else if (fb->format->is_yuv) {
		/* HDR planes route YUV through the programmable input CSC. */
		plane_color_ctl |= PLANE_COLOR_INPUT_CSC_ENABLE;
	}

	return plane_color_ctl;
}
4219
/*
 * Re-take hardware state and restore the previously duplicated atomic
 * @state (may be NULL) after a suspend or GPU reset, under @ctx's locks.
 *
 * Returns 0 on success or a negative error code from the commit.
 */
static int
__intel_display_resume(struct drm_device *dev,
		       struct drm_atomic_state *state,
		       struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *crtc;
	int i, ret;

	intel_modeset_setup_hw_state(dev, ctx);
	i915_redisable_vga(to_i915(dev));

	if (!state)
		return 0;

	/*
	 * We've duplicated the state, so the pointers held by the old state
	 * are stale; don't use it until the duplicated state is committed.
	 */
	for_each_new_crtc_in_state(state, crtc, crtc_state, i) {
		/*
		 * Force a full recalculation even when restoring the
		 * current state — with fast modeset a compatible state
		 * might otherwise skip the modeset entirely.
		 */
		crtc_state->mode_changed = true;
	}

	/* Ignore any leftover values in the watermark registers. */
	if (!HAS_GMCH(to_i915(dev)))
		to_intel_atomic_state(state)->skip_intermediate_wm = true;

	ret = drm_atomic_helper_commit_duplicated_state(state, ctx);

	/* All locks were taken up front, so no deadlock backoff expected. */
	WARN_ON(ret == -EDEADLK);
	return ret;
}
4258
4259 static bool gpu_reset_clobbers_display(struct drm_i915_private *dev_priv)
4260 {
4261 return (INTEL_INFO(dev_priv)->gpu_reset_clobbers_display &&
4262 intel_has_gpu_reset(dev_priv));
4263 }
4264
/*
 * Prepare the display for a GPU reset on platforms where the reset
 * clobbers display state: take all modeset locks, duplicate the current
 * atomic state for later restore, and gracefully disable all crtcs.
 *
 * Paired with intel_finish_reset(), which restores the saved state and
 * releases the locks acquired here.
 */
void intel_prepare_reset(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
	struct drm_atomic_state *state;
	int ret;

	/* Nothing to do if the reset won't touch the display. */
	if (!i915_modparams.force_reset_modeset_test &&
	    !gpu_reset_clobbers_display(dev_priv))
		return;

	/* Tell waiters (and intel_finish_reset()) that we're in a modeset reset. */
	set_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags);
	smp_mb__after_atomic();
	wake_up_bit(&dev_priv->gt.reset.flags, I915_RESET_MODESET);

	if (atomic_read(&dev_priv->gpu_error.pending_fb_pin)) {
		DRM_DEBUG_KMS("Modeset potentially stuck, unbreaking through wedging\n");
		intel_gt_set_wedged(&dev_priv->gt);
	}

	/*
	 * Take every modeset lock; loop on -EDEADLK with an explicit
	 * backoff since we're not inside a normal atomic ioctl.
	 */
	mutex_lock(&dev->mode_config.mutex);
	drm_modeset_acquire_init(ctx, 0);
	while (1) {
		ret = drm_modeset_lock_all_ctx(dev, ctx);
		if (ret != -EDEADLK)
			break;

		drm_modeset_backoff(ctx);
	}

	/*
	 * Snapshot the current state so intel_finish_reset() can restore
	 * it after the reset.
	 */
	state = drm_atomic_helper_duplicate_state(dev, ctx);
	if (IS_ERR(state)) {
		ret = PTR_ERR(state);
		DRM_ERROR("Duplicating state failed with %i\n", ret);
		return;
	}

	ret = drm_atomic_helper_disable_all(dev, ctx);
	if (ret) {
		DRM_ERROR("Suspending crtc's failed with %i\n", ret);
		drm_atomic_state_put(state);
		return;
	}

	dev_priv->modeset_restore_state = state;
	state->acquire_ctx = ctx;
}
4321
/*
 * Counterpart of intel_prepare_reset(): restore the display state saved
 * before the GPU reset, re-initialize the display hardware if the reset
 * clobbered it, and drop the modeset locks taken in prepare.
 */
void intel_finish_reset(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
	struct drm_atomic_state *state;
	int ret;

	/* intel_prepare_reset() bailed early; nothing to undo. */
	if (!test_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags))
		return;

	state = fetch_and_zero(&dev_priv->modeset_restore_state);
	if (!state)
		goto unlock;

	if (!gpu_reset_clobbers_display(dev_priv)) {
		/* Display untouched by the reset: a plain restore suffices. */
		ret = __intel_display_resume(dev, state, ctx);
		if (ret)
			DRM_ERROR("Restoring old state failed with %i\n", ret);
	} else {
		/*
		 * The display was reset as well, so a full hw re-init
		 * (clocks, clock gating, hotplug) is needed before the
		 * saved state can be committed again.
		 */
		intel_pps_unlock_regs_wa(dev_priv);
		intel_modeset_init_hw(dev);
		intel_init_clock_gating(dev_priv);

		spin_lock_irq(&dev_priv->irq_lock);
		if (dev_priv->display.hpd_irq_setup)
			dev_priv->display.hpd_irq_setup(dev_priv);
		spin_unlock_irq(&dev_priv->irq_lock);

		ret = __intel_display_resume(dev, state, ctx);
		if (ret)
			DRM_ERROR("Restoring old state failed with %i\n", ret);

		intel_hpd_init(dev_priv);
	}

	drm_atomic_state_put(state);
unlock:
	drm_modeset_drop_locks(ctx);
	drm_modeset_acquire_fini(ctx);
	mutex_unlock(&dev->mode_config.mutex);

	clear_bit_unlock(I915_RESET_MODESET, &dev_priv->gt.reset.flags);
}
4372
4373 static void icl_set_pipe_chicken(struct intel_crtc *crtc)
4374 {
4375 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
4376 enum pipe pipe = crtc->pipe;
4377 u32 tmp;
4378
4379 tmp = I915_READ(PIPE_CHICKEN(pipe));
4380
4381
4382
4383
4384
4385
4386 tmp |= PER_PIXEL_ALPHA_BYPASS_EN;
4387
4388
4389
4390
4391
4392
4393 tmp |= PIXEL_ROUNDING_TRUNC_FB_PASSTHRU;
4394 I915_WRITE(PIPE_CHICKEN(pipe), tmp);
4395 }
4396
/*
 * Apply a "fastset" pipe update: reprogram the pipe source size and the
 * panel fitter from the new crtc state without doing a full modeset.
 */
static void intel_update_pipe_config(const struct intel_crtc_state *old_crtc_state,
				     const struct intel_crtc_state *new_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	/* Keep the legacy mode field in sync with the new state. */
	crtc->base.mode = new_crtc_state->base.mode;

	/*
	 * Program the new pipe source size.  PIPESRC takes (width-1) in the
	 * high 16 bits and (height-1) in the low 16 bits.
	 */
	I915_WRITE(PIPESRC(crtc->pipe),
		   ((new_crtc_state->pipe_src_w - 1) << 16) |
		   (new_crtc_state->pipe_src_h - 1));

	/* Update the panel fitter to match; gen9+ uses the shared scalers,
	 * older PCH-split platforms use the dedicated pfit. */
	if (INTEL_GEN(dev_priv) >= 9) {
		skl_detach_scalers(new_crtc_state);

		if (new_crtc_state->pch_pfit.enabled)
			skylake_pfit_enable(new_crtc_state);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		if (new_crtc_state->pch_pfit.enabled)
			ironlake_pfit_enable(new_crtc_state);
		else if (old_crtc_state->pch_pfit.enabled)
			ironlake_pfit_disable(old_crtc_state);
	}

	/* Re-apply the gen11 pipe chicken bits after the update. */
	if (INTEL_GEN(dev_priv) >= 11)
		icl_set_pipe_chicken(crtc);
}
4435
/*
 * Switch the FDI TX/RX out of link-training patterns into normal pixel
 * transmission, with enhanced framing enabled on both ends.
 */
static void intel_fdi_normal_train(struct intel_crtc *crtc)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* Take the TX side out of training (IVB uses different bits). */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	if (IS_IVYBRIDGE(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_NONE_IVB;
		temp |= FDI_LINK_TRAIN_NONE_IVB | FDI_TX_ENHANCE_FRAME_ENABLE;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_NONE | FDI_TX_ENHANCE_FRAME_ENABLE;
	}
	I915_WRITE(reg, temp);

	/* Take the RX side out of training (CPT PCH uses its own encoding). */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_NORMAL_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_NONE;
	}
	I915_WRITE(reg, temp | FDI_RX_ENHANCE_FRAME_ENABLE);

	/* Wait for the link to settle after leaving training. */
	POSTING_READ(reg);
	udelay(1000);

	/* IVB: enable FS/FE error correction on the RX side. */
	if (IS_IVYBRIDGE(dev_priv))
		I915_WRITE(reg, I915_READ(reg) | FDI_FS_ERRC_ENABLE |
			   FDI_FE_ERRC_ENABLE);
}
4476
4477
/*
 * Ironlake FDI link training: run training pattern 1 until bit lock is
 * reported in FDI_RX_IIR, then pattern 2 until symbol lock, polling with
 * bounded retries.
 */
static void ironlake_fdi_link_train(struct intel_crtc *crtc,
				    const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, tries;

	/* FDI training requires the pipe to be running already. */
	assert_pipe_enabled(dev_priv, pipe);

	/* Unmask the bit/symbol lock interrupts so their status becomes
	 * visible in FDI_RX_IIR while we poll below. */
	reg = FDI_RX_IMR(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	I915_WRITE(reg, temp);
	I915_READ(reg);
	udelay(150);

	/* Enable TX with the configured lane count and training pattern 1. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_DP_PORT_WIDTH_MASK;
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	I915_WRITE(reg, temp | FDI_TX_ENABLE);

	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	I915_WRITE(reg, temp | FDI_RX_ENABLE);

	POSTING_READ(reg);
	udelay(150);

	/* Override the RX phase sync pointer, then enable it. */
	I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR);
	I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR |
		   FDI_RX_PHASE_SYNC_POINTER_EN);

	/* Poll for bit lock (train 1 done); ack by writing the bit back. */
	reg = FDI_RX_IIR(pipe);
	for (tries = 0; tries < 5; tries++) {
		temp = I915_READ(reg);
		DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);

		if ((temp & FDI_RX_BIT_LOCK)) {
			DRM_DEBUG_KMS("FDI train 1 done.\n");
			I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
			break;
		}
	}
	if (tries == 5)
		DRM_ERROR("FDI train 1 fail!\n");

	/* Switch both ends to training pattern 2. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	I915_WRITE(reg, temp);

	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	I915_WRITE(reg, temp);

	POSTING_READ(reg);
	udelay(150);

	/* Poll for symbol lock (train 2 done); ack by writing back. */
	reg = FDI_RX_IIR(pipe);
	for (tries = 0; tries < 5; tries++) {
		temp = I915_READ(reg);
		DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);

		if (temp & FDI_RX_SYMBOL_LOCK) {
			I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
			DRM_DEBUG_KMS("FDI train 2 done.\n");
			break;
		}
	}
	if (tries == 5)
		DRM_ERROR("FDI train 2 fail!\n");

	DRM_DEBUG_KMS("FDI train done\n");

}
4570
/*
 * FDI TX voltage-swing / pre-emphasis levels tried in order during
 * SNB/IVB link training (macro names encode mV swing and dB emphasis).
 */
static const int snb_b_fdi_train_param[] = {
	FDI_LINK_TRAIN_400MV_0DB_SNB_B,
	FDI_LINK_TRAIN_400MV_6DB_SNB_B,
	FDI_LINK_TRAIN_600MV_3_5DB_SNB_B,
	FDI_LINK_TRAIN_800MV_0DB_SNB_B,
};
4577
4578
/*
 * Sandy Bridge FDI link training: for each training pattern, step through
 * the snb_b_fdi_train_param voltage/emphasis table until the corresponding
 * lock bit appears in FDI_RX_IIR.
 */
static void gen6_fdi_link_train(struct intel_crtc *crtc,
				const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, i, retry;

	/* Unmask the bit/symbol lock interrupts so their status becomes
	 * visible in FDI_RX_IIR while we poll below. */
	reg = FDI_RX_IMR(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	I915_WRITE(reg, temp);

	POSTING_READ(reg);
	udelay(150);

	/* Enable TX: lane count, training pattern 1, lowest vswing level. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_DP_PORT_WIDTH_MASK;
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
	/* Start from the first (lowest) entry of the vswing table. */
	temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
	I915_WRITE(reg, temp | FDI_TX_ENABLE);

	I915_WRITE(FDI_RX_MISC(pipe),
		   FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

	/* Enable RX with training pattern 1 (CPT PCH has its own encoding). */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_1;
	}
	I915_WRITE(reg, temp | FDI_RX_ENABLE);

	POSTING_READ(reg);
	udelay(150);

	/* Train 1: try each vswing/emphasis level until bit lock. */
	for (i = 0; i < 4; i++) {
		reg = FDI_TX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[i];
		I915_WRITE(reg, temp);

		POSTING_READ(reg);
		udelay(500);

		for (retry = 0; retry < 5; retry++) {
			reg = FDI_RX_IIR(pipe);
			temp = I915_READ(reg);
			DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
			if (temp & FDI_RX_BIT_LOCK) {
				I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
				DRM_DEBUG_KMS("FDI train 1 done.\n");
				break;
			}
			udelay(50);
		}
		if (retry < 5)
			break;
	}
	if (i == 4)
		DRM_ERROR("FDI train 1 fail!\n");

	/* Train 2: switch TX to pattern 2 (gen6 also resets vswing). */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_2;
	if (IS_GEN(dev_priv, 6)) {
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		/* Restart from the lowest vswing level for pattern 2. */
		temp |= FDI_LINK_TRAIN_400MV_0DB_SNB_B;
	}
	I915_WRITE(reg, temp);

	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_2;
	}
	I915_WRITE(reg, temp);

	POSTING_READ(reg);
	udelay(150);

	/* Train 2: try each vswing/emphasis level until symbol lock. */
	for (i = 0; i < 4; i++) {
		reg = FDI_TX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[i];
		I915_WRITE(reg, temp);

		POSTING_READ(reg);
		udelay(500);

		for (retry = 0; retry < 5; retry++) {
			reg = FDI_RX_IIR(pipe);
			temp = I915_READ(reg);
			DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);
			if (temp & FDI_RX_SYMBOL_LOCK) {
				I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
				DRM_DEBUG_KMS("FDI train 2 done.\n");
				break;
			}
			udelay(50);
		}
		if (retry < 5)
			break;
	}
	if (i == 4)
		DRM_ERROR("FDI train 2 fail!\n");

	DRM_DEBUG_KMS("FDI train done.\n");
}
4710
4711
/*
 * Ivy Bridge manual FDI link training: for each vswing/emphasis level the
 * link is fully disabled and re-enabled, pattern 1 is polled for bit lock
 * and pattern 2 for symbol lock; on success we jump straight to train_done.
 */
static void ivb_manual_fdi_link_train(struct intel_crtc *crtc,
				      const struct intel_crtc_state *crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = crtc->pipe;
	i915_reg_t reg;
	u32 temp, i, j;

	/* Unmask the bit/symbol lock interrupts so their status becomes
	 * visible in FDI_RX_IIR while we poll below. */
	reg = FDI_RX_IMR(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_RX_SYMBOL_LOCK;
	temp &= ~FDI_RX_BIT_LOCK;
	I915_WRITE(reg, temp);

	POSTING_READ(reg);
	udelay(150);

	DRM_DEBUG_KMS("FDI_RX_IIR before link train 0x%x\n",
		      I915_READ(FDI_RX_IIR(pipe)));

	/* Each table entry is tried twice (hence the * 2). */
	for (j = 0; j < ARRAY_SIZE(snb_b_fdi_train_param) * 2; j++) {
		/* Disable TX and RX first, clearing auto-train bits. */
		reg = FDI_TX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~(FDI_LINK_TRAIN_AUTO | FDI_LINK_TRAIN_NONE_IVB);
		temp &= ~FDI_TX_ENABLE;
		I915_WRITE(reg, temp);

		reg = FDI_RX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_LINK_TRAIN_AUTO;
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp &= ~FDI_RX_ENABLE;
		I915_WRITE(reg, temp);

		/* Re-enable TX: lane count, pattern 1, current vswing level. */
		reg = FDI_TX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_DP_PORT_WIDTH_MASK;
		temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
		temp |= FDI_LINK_TRAIN_PATTERN_1_IVB;
		temp &= ~FDI_LINK_TRAIN_VOL_EMP_MASK;
		temp |= snb_b_fdi_train_param[j/2];
		temp |= FDI_COMPOSITE_SYNC;
		I915_WRITE(reg, temp | FDI_TX_ENABLE);

		I915_WRITE(FDI_RX_MISC(pipe),
			   FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

		/* Re-enable RX with pattern 1 (CPT encoding). */
		reg = FDI_RX_CTL(pipe);
		temp = I915_READ(reg);
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
		temp |= FDI_COMPOSITE_SYNC;
		I915_WRITE(reg, temp | FDI_RX_ENABLE);

		POSTING_READ(reg);
		udelay(1);

		/* Poll for bit lock; a second read covers a just-set bit. */
		for (i = 0; i < 4; i++) {
			reg = FDI_RX_IIR(pipe);
			temp = I915_READ(reg);
			DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);

			if (temp & FDI_RX_BIT_LOCK ||
			    (I915_READ(reg) & FDI_RX_BIT_LOCK)) {
				I915_WRITE(reg, temp | FDI_RX_BIT_LOCK);
				DRM_DEBUG_KMS("FDI train 1 done, level %i.\n",
					      i);
				break;
			}
			udelay(1);
		}
		if (i == 4) {
			/* No bit lock at this level; try the next one. */
			DRM_DEBUG_KMS("FDI train 1 fail on vswing %d\n", j / 2);
			continue;
		}

		/* Train 2: switch both ends to pattern 2. */
		reg = FDI_TX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_LINK_TRAIN_NONE_IVB;
		temp |= FDI_LINK_TRAIN_PATTERN_2_IVB;
		I915_WRITE(reg, temp);

		reg = FDI_RX_CTL(pipe);
		temp = I915_READ(reg);
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_2_CPT;
		I915_WRITE(reg, temp);

		POSTING_READ(reg);
		udelay(2);

		/* Poll for symbol lock; success ends training entirely. */
		for (i = 0; i < 4; i++) {
			reg = FDI_RX_IIR(pipe);
			temp = I915_READ(reg);
			DRM_DEBUG_KMS("FDI_RX_IIR 0x%x\n", temp);

			if (temp & FDI_RX_SYMBOL_LOCK ||
			    (I915_READ(reg) & FDI_RX_SYMBOL_LOCK)) {
				I915_WRITE(reg, temp | FDI_RX_SYMBOL_LOCK);
				DRM_DEBUG_KMS("FDI train 2 done, level %i.\n",
					      i);
				goto train_done;
			}
			udelay(2);
		}
		if (i == 4)
			DRM_DEBUG_KMS("FDI train 2 fail on vswing %d\n", j / 2);
	}

train_done:
	DRM_DEBUG_KMS("FDI train done.\n");
}
4830
/*
 * Power up the FDI PLLs: RX PLL first, then the PCD clock, then the TX PLL
 * (if not already on), with the documented settle delays between steps.
 */
static void ironlake_fdi_pll_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	int pipe = intel_crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* Enable the RX PLL, programming lane count and copying the pipe's
	 * BPC field from PIPECONF into the RX control register. */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~(FDI_DP_PORT_WIDTH_MASK | (0x7 << 16));
	temp |= FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	temp |= (I915_READ(PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	I915_WRITE(reg, temp | FDI_RX_PLL_ENABLE);

	POSTING_READ(reg);
	udelay(200);

	/* Switch from the RX PLL to the PCD clock. */
	temp = I915_READ(reg);
	I915_WRITE(reg, temp | FDI_PCDCLK);

	POSTING_READ(reg);
	udelay(200);

	/* Enable the TX PLL only if it is not running yet. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	if ((temp & FDI_TX_PLL_ENABLE) == 0) {
		I915_WRITE(reg, temp | FDI_TX_PLL_ENABLE);

		POSTING_READ(reg);
		udelay(100);
	}
}
4867
/*
 * Power down the FDI PLLs in reverse order of ironlake_fdi_pll_enable():
 * PCD clock off, TX PLL off, then RX PLL off, with settle delays.
 */
static void ironlake_fdi_pll_disable(struct intel_crtc *intel_crtc)
{
	struct drm_device *dev = intel_crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = intel_crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* Switch the RX side off the PCD clock first. */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	I915_WRITE(reg, temp & ~FDI_PCDCLK);

	/* Disable the TX PLL. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	I915_WRITE(reg, temp & ~FDI_TX_PLL_ENABLE);

	POSTING_READ(reg);
	udelay(100);

	/* Finally disable the RX PLL. */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	I915_WRITE(reg, temp & ~FDI_RX_PLL_ENABLE);

	POSTING_READ(reg);
	udelay(100);
}
4897
/*
 * Disable the FDI link: turn off TX and RX, then leave both sides parked
 * in training pattern 1 so a later re-train starts from a known state.
 */
static void ironlake_fdi_disable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe;
	i915_reg_t reg;
	u32 temp;

	/* Disable the transmitter. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	I915_WRITE(reg, temp & ~FDI_TX_ENABLE);
	POSTING_READ(reg);

	/* Disable the receiver, refreshing its BPC field from PIPECONF. */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~(0x7 << 16);
	temp |= (I915_READ(PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	I915_WRITE(reg, temp & ~FDI_RX_ENABLE);

	POSTING_READ(reg);
	udelay(100);

	/* IBX PCH: keep the phase sync pointer override asserted. */
	if (HAS_PCH_IBX(dev_priv))
		I915_WRITE(FDI_RX_CHICKEN(pipe), FDI_RX_PHASE_SYNC_POINTER_OVR);

	/* Park TX in training pattern 1. */
	reg = FDI_TX_CTL(pipe);
	temp = I915_READ(reg);
	temp &= ~FDI_LINK_TRAIN_NONE;
	temp |= FDI_LINK_TRAIN_PATTERN_1;
	I915_WRITE(reg, temp);

	/* Park RX in training pattern 1 (CPT uses its own encoding). */
	reg = FDI_RX_CTL(pipe);
	temp = I915_READ(reg);
	if (HAS_PCH_CPT(dev_priv)) {
		temp &= ~FDI_LINK_TRAIN_PATTERN_MASK_CPT;
		temp |= FDI_LINK_TRAIN_PATTERN_1_CPT;
	} else {
		temp &= ~FDI_LINK_TRAIN_NONE;
		temp |= FDI_LINK_TRAIN_PATTERN_1;
	}
	/* Refresh the BPC field here as well. */
	temp &= ~(0x07 << 16);
	temp |= (I915_READ(PIPECONF(pipe)) & PIPECONF_BPC_MASK) << 11;
	I915_WRITE(reg, temp);

	POSTING_READ(reg);
	udelay(100);
}
4950
/*
 * Check whether any crtc still has an atomic commit whose cleanup (and thus
 * framebuffer unpin) has not completed.  Returns true after waiting one
 * vblank on the first such crtc found; false if all commits are cleaned up.
 */
bool intel_has_pending_fb_unpin(struct drm_i915_private *dev_priv)
{
	struct drm_crtc *crtc;
	bool cleanup_done;

	drm_for_each_crtc(crtc, &dev_priv->drm) {
		struct drm_crtc_commit *commit;
		/* commit_list is protected by the per-crtc commit_lock. */
		spin_lock(&crtc->commit_lock);
		commit = list_first_entry_or_null(&crtc->commit_list,
				struct drm_crtc_commit, commit_entry);
		/* No commit at all counts as "cleanup done". */
		cleanup_done = commit ?
			try_wait_for_completion(&commit->cleanup_done) : true;
		spin_unlock(&crtc->commit_lock);

		if (cleanup_done)
			continue;

		/* Give the pending commit a vblank to make progress. */
		drm_crtc_wait_one_vblank(crtc);

		return true;
	}

	return false;
}
4975
/*
 * Disable the LPT iCLKIP clock: gate the pixel clock first, then stop the
 * SSC generator through the sideband (SBI) interface.
 */
void lpt_disable_iclkip(struct drm_i915_private *dev_priv)
{
	u32 temp;

	I915_WRITE(PIXCLK_GATE, PIXCLK_GATE_GATE);

	/* sb_lock serializes all sideband accesses. */
	mutex_lock(&dev_priv->sb_lock);

	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	temp |= SBI_SSCCTL_DISABLE;
	intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);
}
4990
4991
/*
 * Program the LPT iCLKIP clock to generate the requested pixel clock.
 * The output is derived from a 172.8 MHz virtual root clock divided by
 * (divsel + 2) * 64 + phaseinc, optionally halved once more by auxdiv.
 */
static void lpt_program_iclkip(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	int clock = crtc_state->base.adjusted_mode.crtc_clock;
	u32 divsel, phaseinc, auxdiv, phasedir = 0;
	u32 temp;

	/* Always disable first; re-enabled at the end of programming. */
	lpt_disable_iclkip(dev_priv);

	/*
	 * Search for a divisor: try auxdiv 0 then 1 (each step halves the
	 * effective clock), splitting the desired divisor into an integer
	 * part (divsel) and a 1/64th phase-increment fraction (phaseinc).
	 */
	for (auxdiv = 0; auxdiv < 2; auxdiv++) {
		u32 iclk_virtual_root_freq = 172800 * 1000;
		u32 iclk_pi_range = 64;
		u32 desired_divisor;

		desired_divisor = DIV_ROUND_CLOSEST(iclk_virtual_root_freq,
						    clock << auxdiv);
		divsel = (desired_divisor / iclk_pi_range) - 2;
		phaseinc = desired_divisor % iclk_pi_range;

		/* divsel must fit in its 7-bit field; otherwise retry with
		 * the larger auxdiv. */
		if (divsel <= 0x7f)
			break;
	}

	/* Catch values that would overflow their register fields. */
	WARN_ON(SBI_SSCDIVINTPHASE_DIVSEL(divsel) &
		~SBI_SSCDIVINTPHASE_DIVSEL_MASK);
	WARN_ON(SBI_SSCDIVINTPHASE_DIR(phasedir) &
		~SBI_SSCDIVINTPHASE_INCVAL_MASK);

	DRM_DEBUG_KMS("iCLKIP clock: found settings for %dKHz refresh rate: auxdiv=%x, divsel=%x, phasedir=%x, phaseinc=%x\n",
		      clock,
		      auxdiv,
		      divsel,
		      phasedir,
		      phaseinc);

	mutex_lock(&dev_priv->sb_lock);

	/* Program divisor, phase increment and direction. */
	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	temp &= ~SBI_SSCDIVINTPHASE_DIVSEL_MASK;
	temp |= SBI_SSCDIVINTPHASE_DIVSEL(divsel);
	temp &= ~SBI_SSCDIVINTPHASE_INCVAL_MASK;
	temp |= SBI_SSCDIVINTPHASE_INCVAL(phaseinc);
	temp |= SBI_SSCDIVINTPHASE_DIR(phasedir);
	temp |= SBI_SSCDIVINTPHASE_PROPAGATE;
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE6, temp, SBI_ICLK);

	/* Program the auxiliary divide-by-two selector. */
	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	temp &= ~SBI_SSCAUXDIV_FINALDIV2SEL(1);
	temp |= SBI_SSCAUXDIV_FINALDIV2SEL(auxdiv);
	intel_sbi_write(dev_priv, SBI_SSCAUXDIV6, temp, SBI_ICLK);

	/* Enable the SSC generator. */
	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	temp &= ~SBI_SSCCTL_DISABLE;
	intel_sbi_write(dev_priv, SBI_SSCCTL6, temp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);

	/* Let the clock settle before ungating the pixel clock. */
	udelay(24);

	I915_WRITE(PIXCLK_GATE, PIXCLK_GATE_UNGATE);
}
5069
/*
 * Read back the currently-programmed iCLKIP frequency (in kHz), inverting
 * the divisor math of lpt_program_iclkip().  Returns 0 when the pixel
 * clock is gated or the SSC generator is disabled.
 */
int lpt_get_iclkip(struct drm_i915_private *dev_priv)
{
	u32 divsel, phaseinc, auxdiv;
	u32 iclk_virtual_root_freq = 172800 * 1000;
	u32 iclk_pi_range = 64;
	u32 desired_divisor;
	u32 temp;

	/* Pixel clock gated: iCLKIP is not producing a clock. */
	if ((I915_READ(PIXCLK_GATE) & PIXCLK_GATE_UNGATE) == 0)
		return 0;

	mutex_lock(&dev_priv->sb_lock);

	temp = intel_sbi_read(dev_priv, SBI_SSCCTL6, SBI_ICLK);
	if (temp & SBI_SSCCTL_DISABLE) {
		mutex_unlock(&dev_priv->sb_lock);
		return 0;
	}

	/* Extract divisor and phase increment fields. */
	temp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE6, SBI_ICLK);
	divsel = (temp & SBI_SSCDIVINTPHASE_DIVSEL_MASK) >>
		SBI_SSCDIVINTPHASE_DIVSEL_SHIFT;
	phaseinc = (temp & SBI_SSCDIVINTPHASE_INCVAL_MASK) >>
		SBI_SSCDIVINTPHASE_INCVAL_SHIFT;

	/* And the auxiliary divide-by-two selector. */
	temp = intel_sbi_read(dev_priv, SBI_SSCAUXDIV6, SBI_ICLK);
	auxdiv = (temp & SBI_SSCAUXDIV_FINALDIV2SEL_MASK) >>
		SBI_SSCAUXDIV_FINALDIV2SEL_SHIFT;

	mutex_unlock(&dev_priv->sb_lock);

	/* Reconstruct the effective divisor and derive the frequency. */
	desired_divisor = (divsel + 2) * iclk_pi_range + phaseinc;

	return DIV_ROUND_CLOSEST(iclk_virtual_root_freq,
				 desired_divisor << auxdiv);
}
5106
/*
 * Copy the CPU transcoder's horizontal and vertical timing registers
 * verbatim into the given PCH transcoder so both sides agree on the mode.
 */
static void ironlake_pch_transcoder_set_timings(const struct intel_crtc_state *crtc_state,
						enum pipe pch_transcoder)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/* Horizontal timings. */
	I915_WRITE(PCH_TRANS_HTOTAL(pch_transcoder),
		   I915_READ(HTOTAL(cpu_transcoder)));
	I915_WRITE(PCH_TRANS_HBLANK(pch_transcoder),
		   I915_READ(HBLANK(cpu_transcoder)));
	I915_WRITE(PCH_TRANS_HSYNC(pch_transcoder),
		   I915_READ(HSYNC(cpu_transcoder)));

	/* Vertical timings, including the vsync shift. */
	I915_WRITE(PCH_TRANS_VTOTAL(pch_transcoder),
		   I915_READ(VTOTAL(cpu_transcoder)));
	I915_WRITE(PCH_TRANS_VBLANK(pch_transcoder),
		   I915_READ(VBLANK(cpu_transcoder)));
	I915_WRITE(PCH_TRANS_VSYNC(pch_transcoder),
		   I915_READ(VSYNC(cpu_transcoder)));
	I915_WRITE(PCH_TRANS_VSYNCSHIFT(pch_transcoder),
		   I915_READ(VSYNCSHIFT(cpu_transcoder)));
}
5130
5131 static void cpt_set_fdi_bc_bifurcation(struct drm_i915_private *dev_priv, bool enable)
5132 {
5133 u32 temp;
5134
5135 temp = I915_READ(SOUTH_CHICKEN1);
5136 if (!!(temp & FDI_BC_BIFURCATION_SELECT) == enable)
5137 return;
5138
5139 WARN_ON(I915_READ(FDI_RX_CTL(PIPE_B)) & FDI_RX_ENABLE);
5140 WARN_ON(I915_READ(FDI_RX_CTL(PIPE_C)) & FDI_RX_ENABLE);
5141
5142 temp &= ~FDI_BC_BIFURCATION_SELECT;
5143 if (enable)
5144 temp |= FDI_BC_BIFURCATION_SELECT;
5145
5146 DRM_DEBUG_KMS("%sabling fdi C rx\n", enable ? "en" : "dis");
5147 I915_WRITE(SOUTH_CHICKEN1, temp);
5148 POSTING_READ(SOUTH_CHICKEN1);
5149 }
5150
5151 static void ivybridge_update_fdi_bc_bifurcation(const struct intel_crtc_state *crtc_state)
5152 {
5153 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
5154 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
5155
5156 switch (crtc->pipe) {
5157 case PIPE_A:
5158 break;
5159 case PIPE_B:
5160 if (crtc_state->fdi_lanes > 2)
5161 cpt_set_fdi_bc_bifurcation(dev_priv, false);
5162 else
5163 cpt_set_fdi_bc_bifurcation(dev_priv, true);
5164
5165 break;
5166 case PIPE_C:
5167 cpt_set_fdi_bc_bifurcation(dev_priv, true);
5168
5169 break;
5170 default:
5171 BUG();
5172 }
5173 }
5174
5175
5176
5177
5178
/*
 * Find the single encoder that will drive the given crtc in the new atomic
 * state, by scanning the new connector states.  Warns if the crtc ends up
 * with anything other than exactly one encoder.
 */
static struct intel_encoder *
intel_get_crtc_new_encoder(const struct intel_atomic_state *state,
			   const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	const struct drm_connector_state *connector_state;
	const struct drm_connector *connector;
	struct intel_encoder *encoder = NULL;
	int num_encoders = 0;
	int i;

	for_each_new_connector_in_state(&state->base, connector, connector_state, i) {
		/* Skip connectors routed to other crtcs. */
		if (connector_state->crtc != &crtc->base)
			continue;

		encoder = to_intel_encoder(connector_state->best_encoder);
		num_encoders++;
	}

	/* Cloning is not expected here; exactly one encoder per pipe. */
	WARN(num_encoders != 1, "%d encoders for pipe %c\n",
	     num_encoders, pipe_name(crtc->pipe));

	return encoder;
}
5203
5204
5205
5206
5207
5208
5209
5210
5211
/*
 * Bring up the PCH side for a crtc: train the FDI link, select and enable
 * the shared DPLL, program the PCH transcoder timings and (for CPT DP)
 * the transcoder DP control, then enable the PCH transcoder.
 */
static void ironlake_pch_enable(const struct intel_atomic_state *state,
				const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = crtc->pipe;
	u32 temp;

	assert_pch_transcoder_disabled(dev_priv, pipe);

	/* IVB: sort out FDI B/C lane bifurcation before training. */
	if (IS_IVYBRIDGE(dev_priv))
		ivybridge_update_fdi_bc_bifurcation(crtc_state);

	/* Copy the TU size from the pipe's data M register into FDI RX. */
	I915_WRITE(FDI_RX_TUSIZE1(pipe),
		   I915_READ(PIPE_DATA_M1(pipe)) & TU_SIZE_MASK);

	/* Train the FDI link via the platform-specific hook. */
	dev_priv->display.fdi_link_train(crtc, crtc_state);

	/* CPT: route the chosen shared DPLL (A or B) to this transcoder. */
	if (HAS_PCH_CPT(dev_priv)) {
		u32 sel;

		temp = I915_READ(PCH_DPLL_SEL);
		temp |= TRANS_DPLL_ENABLE(pipe);
		sel = TRANS_DPLLB_SEL(pipe);
		if (crtc_state->shared_dpll ==
		    intel_get_shared_dpll_by_id(dev_priv, DPLL_ID_PCH_PLL_B))
			temp |= sel;
		else
			temp &= ~sel;
		I915_WRITE(PCH_DPLL_SEL, temp);
	}

	/* Enable the shared DPLL after routing has been selected. */
	intel_enable_shared_dpll(crtc_state);

	/* Program PCH transcoder timings (panel must be unlocked). */
	assert_panel_unlocked(dev_priv, pipe);
	ironlake_pch_transcoder_set_timings(crtc_state, pipe);

	/* Drop out of link training into normal FDI operation. */
	intel_fdi_normal_train(crtc);

	/* CPT DP: program the transcoder DP control register with BPC,
	 * sync polarity and the destination port. */
	if (HAS_PCH_CPT(dev_priv) &&
	    intel_crtc_has_dp_encoder(crtc_state)) {
		const struct drm_display_mode *adjusted_mode =
			&crtc_state->base.adjusted_mode;
		u32 bpc = (I915_READ(PIPECONF(pipe)) & PIPECONF_BPC_MASK) >> 5;
		i915_reg_t reg = TRANS_DP_CTL(pipe);
		enum port port;

		temp = I915_READ(reg);
		temp &= ~(TRANS_DP_PORT_SEL_MASK |
			  TRANS_DP_SYNC_MASK |
			  TRANS_DP_BPC_MASK);
		temp |= TRANS_DP_OUTPUT_ENABLE;
		temp |= bpc << 9; /* same BPC as the pipe */

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			temp |= TRANS_DP_HSYNC_ACTIVE_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			temp |= TRANS_DP_VSYNC_ACTIVE_HIGH;

		port = intel_get_crtc_new_encoder(state, crtc_state)->port;
		WARN_ON(port < PORT_B || port > PORT_D);
		temp |= TRANS_DP_PORT_SEL(port);

		I915_WRITE(reg, temp);
	}

	ironlake_enable_pch_transcoder(crtc_state);
}
5295
/*
 * LPT variant of PCH enable: program the iCLKIP clock, copy the timings
 * into the (single, pipe-A) PCH transcoder and enable it.
 */
static void lpt_pch_enable(const struct intel_atomic_state *state,
			   const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/* LPT only has one PCH transcoder, hard-wired to pipe A. */
	assert_pch_transcoder_disabled(dev_priv, PIPE_A);

	lpt_program_iclkip(crtc_state);

	/* Set transcoder timing. */
	ironlake_pch_transcoder_set_timings(crtc_state, PIPE_A);

	lpt_enable_pch_transcoder(dev_priv, cpu_transcoder);
}
5312
/*
 * Sanity-check that a pipe started scanning out after a modeset: sample
 * the scanline counter (PIPEDSL) and verify it moves away from that value.
 */
static void cpt_verify_modeset(struct drm_device *dev, int pipe)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	i915_reg_t dslreg = PIPEDSL(pipe);
	u32 temp;

	temp = I915_READ(dslreg);
	udelay(500);
	if (wait_for(I915_READ(dslreg) != temp, 5)) {
		/* Give it one more 5ms window before declaring it stuck. */
		if (wait_for(I915_READ(dslreg) != temp, 5))
			DRM_ERROR("mode set failed: pipe %c stuck\n", pipe_name(pipe));
	}
}
5326
5327
5328
5329
5330
5331
5332
5333
5334
5335
5336
5337
5338
5339
5340
5341
5342
5343
5344
5345
5346
5347
5348
5349
5350
5351
5352
5353
5354
5355
5356
5357
5358
5359
5360
5361
5362
5363
5364
5365
5366
5367 u16 skl_scaler_calc_phase(int sub, int scale, bool chroma_cosited)
5368 {
5369 int phase = -0x8000;
5370 u16 trip = 0;
5371
5372 if (chroma_cosited)
5373 phase += (sub - 1) * 0x8000 / sub;
5374
5375 phase += scale / (2 * sub);
5376
5377
5378
5379
5380
5381
5382 WARN_ON(phase < -0x8000 || phase > 0x18000);
5383
5384 if (phase < 0)
5385 phase = 0x10000 + phase;
5386 else
5387 trip = PS_PHASE_TRIP;
5388
5389 return ((phase >> 2) & PS_PHASE_MASK) | trip;
5390 }
5391
/*
 * Scaler source/destination size limits in pixels: SKL baseline values,
 * with wider maximums on ICL (gen11).  Planar YUV 4:2:0 sources need
 * larger minimum dimensions.
 */
#define SKL_MIN_SRC_W 8
#define SKL_MAX_SRC_W 4096
#define SKL_MIN_SRC_H 8
#define SKL_MAX_SRC_H 4096
#define SKL_MIN_DST_W 8
#define SKL_MAX_DST_W 4096
#define SKL_MIN_DST_H 8
#define SKL_MAX_DST_H 4096
#define ICL_MAX_SRC_W 5120
#define ICL_MAX_SRC_H 4096
#define ICL_MAX_DST_W 5120
#define ICL_MAX_DST_H 4096
#define SKL_MIN_YUV_420_SRC_W 16
#define SKL_MIN_YUV_420_SRC_H 16
5406
/*
 * Stage a scaler request (or release) for one scaler user -- the pipe or a
 * plane -- in the crtc's scaler state.  Validates the scaling against the
 * platform limits; the actual scaler assignment happens later.  Returns 0
 * on success or -EINVAL when the request cannot be satisfied.
 */
static int
skl_update_scaler(struct intel_crtc_state *crtc_state, bool force_detach,
		  unsigned int scaler_user, int *scaler_id,
		  int src_w, int src_h, int dst_w, int dst_h,
		  const struct drm_format_info *format, bool need_scaler)
{
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct intel_crtc *intel_crtc =
		to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->base.adjusted_mode;

	/* Any size mismatch forces a scaler regardless of the caller's
	 * initial need_scaler value. */
	if (src_w != dst_w || src_h != dst_h)
		need_scaler = true;

	/* Scaling is not supported together with interlaced (IF-ID)
	 * output on gen9+. */
	if (INTEL_GEN(dev_priv) >= 9 && crtc_state->base.enable &&
	    need_scaler && adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		DRM_DEBUG_KMS("Pipe/Plane scaling not supported with IF-ID mode\n");
		return -EINVAL;
	}

	/*
	 * Release path: if detaching is forced or no scaler is needed,
	 * free any scaler previously staged for this user.  The hardware
	 * detach happens later; only the software bookkeeping is updated
	 * here.
	 */
	if (force_detach || !need_scaler) {
		if (*scaler_id >= 0) {
			scaler_state->scaler_users &= ~(1 << scaler_user);
			scaler_state->scalers[*scaler_id].in_use = 0;

			DRM_DEBUG_KMS("scaler_user index %u.%u: "
				"Staged freeing scaler id %d scaler_users = 0x%x\n",
				intel_crtc->pipe, scaler_user, *scaler_id,
				scaler_state->scaler_users);
			*scaler_id = -1;
		}
		return 0;
	}

	/* Planar YUV sources have larger minimum dimensions. */
	if (format && is_planar_yuv_format(format->format) &&
	    (src_h < SKL_MIN_YUV_420_SRC_H || src_w < SKL_MIN_YUV_420_SRC_W)) {
		DRM_DEBUG_KMS("Planar YUV: src dimensions not met\n");
		return -EINVAL;
	}

	/* Range check against the SKL (or wider ICL) scaler limits. */
	if (src_w < SKL_MIN_SRC_W || src_h < SKL_MIN_SRC_H ||
	    dst_w < SKL_MIN_DST_W || dst_h < SKL_MIN_DST_H ||
	    (INTEL_GEN(dev_priv) >= 11 &&
	     (src_w > ICL_MAX_SRC_W || src_h > ICL_MAX_SRC_H ||
	      dst_w > ICL_MAX_DST_W || dst_h > ICL_MAX_DST_H)) ||
	    (INTEL_GEN(dev_priv) < 11 &&
	     (src_w > SKL_MAX_SRC_W || src_h > SKL_MAX_SRC_H ||
	      dst_w > SKL_MAX_DST_W || dst_h > SKL_MAX_DST_H))) {
		DRM_DEBUG_KMS("scaler_user index %u.%u: src %ux%u dst %ux%u "
			"size is out of scaler range\n",
			intel_crtc->pipe, scaler_user, src_w, src_h, dst_w, dst_h);
		return -EINVAL;
	}

	/* Mark this user as needing a scaler. */
	scaler_state->scaler_users |= (1 << scaler_user);
	DRM_DEBUG_KMS("scaler_user index %u.%u: "
		"staged scaling request for %ux%u->%ux%u scaler_users = 0x%x\n",
		intel_crtc->pipe, scaler_user, src_w, src_h, dst_w, dst_h,
		scaler_state->scaler_users);

	return 0;
}
5495
5496
5497
5498
5499
5500
5501
5502
5503
5504
5505 int skl_update_scaler_crtc(struct intel_crtc_state *state)
5506 {
5507 const struct drm_display_mode *adjusted_mode = &state->base.adjusted_mode;
5508 bool need_scaler = false;
5509
5510 if (state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
5511 need_scaler = true;
5512
5513 return skl_update_scaler(state, !state->base.active, SKL_CRTC_INDEX,
5514 &state->scaler_state.scaler_id,
5515 state->pipe_src_w, state->pipe_src_h,
5516 adjusted_mode->crtc_hdisplay,
5517 adjusted_mode->crtc_vdisplay, NULL, need_scaler);
5518 }
5519
5520
5521
5522
5523
5524
5525
5526
5527
5528
/*
 * Stage (or release) a plane scaler for the given plane state, then verify
 * that scaling is actually allowed: no color keying, and a pixel format
 * from the supported list.  Returns 0 on success or a negative error code.
 */
static int skl_update_scaler_plane(struct intel_crtc_state *crtc_state,
				   struct intel_plane_state *plane_state)
{
	struct intel_plane *intel_plane =
		to_intel_plane(plane_state->base.plane);
	struct drm_i915_private *dev_priv = to_i915(intel_plane->base.dev);
	struct drm_framebuffer *fb = plane_state->base.fb;
	int ret;
	/* No fb or invisible plane means the scaler must be released. */
	bool force_detach = !fb || !plane_state->base.visible;
	bool need_scaler = false;

	/* Planar YUV on a non-HDR plane always routes through a scaler. */
	if (!icl_is_hdr_plane(dev_priv, intel_plane->id) &&
	    fb && is_planar_yuv_format(fb->format->format))
		need_scaler = true;

	/* Source size comes in 16.16 fixed point, hence the >> 16. */
	ret = skl_update_scaler(crtc_state, force_detach,
				drm_plane_index(&intel_plane->base),
				&plane_state->scaler_id,
				drm_rect_width(&plane_state->base.src) >> 16,
				drm_rect_height(&plane_state->base.src) >> 16,
				drm_rect_width(&plane_state->base.dst),
				drm_rect_height(&plane_state->base.dst),
				fb ? fb->format : NULL, need_scaler);

	/* Done if the update failed or no scaler ended up assigned. */
	if (ret || plane_state->scaler_id < 0)
		return ret;

	/* Scaling cannot be combined with color keying. */
	if (plane_state->ckey.flags) {
		DRM_DEBUG_KMS("[PLANE:%d:%s] scaling with color key not allowed",
			      intel_plane->base.base.id,
			      intel_plane->base.name);
		return -EINVAL;
	}

	/* Only these pixel formats are scalable. */
	switch (fb->format->format) {
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ABGR8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_XRGB2101010:
	case DRM_FORMAT_XBGR2101010:
	case DRM_FORMAT_XBGR16161616F:
	case DRM_FORMAT_ABGR16161616F:
	case DRM_FORMAT_XRGB16161616F:
	case DRM_FORMAT_ARGB16161616F:
	case DRM_FORMAT_YUYV:
	case DRM_FORMAT_YVYU:
	case DRM_FORMAT_UYVY:
	case DRM_FORMAT_VYUY:
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_P010:
	case DRM_FORMAT_P012:
	case DRM_FORMAT_P016:
	case DRM_FORMAT_Y210:
	case DRM_FORMAT_Y212:
	case DRM_FORMAT_Y216:
	case DRM_FORMAT_XVYU2101010:
	case DRM_FORMAT_XVYU12_16161616:
	case DRM_FORMAT_XVYU16161616:
		break;
	default:
		DRM_DEBUG_KMS("[PLANE:%d:%s] FB:%d unsupported scaling format 0x%x\n",
			      intel_plane->base.base.id, intel_plane->base.name,
			      fb->base.id, fb->format->format);
		return -EINVAL;
	}

	return 0;
}
5602
5603 static void skylake_scaler_disable(struct intel_crtc *crtc)
5604 {
5605 int i;
5606
5607 for (i = 0; i < crtc->num_scalers; i++)
5608 skl_detach_scaler(crtc, i);
5609 }
5610
/*
 * skylake_pfit_enable - program the gen9+ pipe scaler as a panel fitter
 * @crtc_state: crtc state carrying the pch_pfit configuration
 *
 * No-op unless pch_pfit is enabled in the state.  Programs the assigned
 * scaler's control, phase, window position and window size registers.
 */
static void skylake_pfit_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	const struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;

	if (crtc_state->pch_pfit.enabled) {
		u16 uv_rgb_hphase, uv_rgb_vphase;
		int pfit_w, pfit_h, hscale, vscale;
		int id;

		/* A pfit-enabled state must have a scaler assigned. */
		if (WARN_ON(crtc_state->scaler_state.scaler_id < 0))
			return;

		/* pch_pfit.size packs width in the high and height in the low 16 bits. */
		pfit_w = (crtc_state->pch_pfit.size >> 16) & 0xFFFF;
		pfit_h = crtc_state->pch_pfit.size & 0xFFFF;

		/* 16.16 fixed-point scale factors from source to fitted size. */
		hscale = (crtc_state->pipe_src_w << 16) / pfit_w;
		vscale = (crtc_state->pipe_src_h << 16) / pfit_h;

		uv_rgb_hphase = skl_scaler_calc_phase(1, hscale, false);
		uv_rgb_vphase = skl_scaler_calc_phase(1, vscale, false);

		id = scaler_state->scaler_id;
		/*
		 * NOTE(review): the phase registers use I915_WRITE_FW
		 * (no forcewake bookkeeping) while the others use
		 * I915_WRITE — looks intentional, but confirm the
		 * locking context allows the _FW variants here.
		 */
		I915_WRITE(SKL_PS_CTRL(pipe, id), PS_SCALER_EN |
			PS_FILTER_MEDIUM | scaler_state->scalers[id].mode);
		I915_WRITE_FW(SKL_PS_VPHASE(pipe, id),
			      PS_Y_PHASE(0) | PS_UV_RGB_PHASE(uv_rgb_vphase));
		I915_WRITE_FW(SKL_PS_HPHASE(pipe, id),
			      PS_Y_PHASE(0) | PS_UV_RGB_PHASE(uv_rgb_hphase));
		I915_WRITE(SKL_PS_WIN_POS(pipe, id), crtc_state->pch_pfit.pos);
		I915_WRITE(SKL_PS_WIN_SZ(pipe, id), crtc_state->pch_pfit.size);
	}
}
5647
/*
 * ironlake_pfit_enable - enable the ILK-style panel fitter
 * @crtc_state: crtc state carrying the pch_pfit configuration
 *
 * No-op unless pch_pfit is enabled in the state.
 */
static void ironlake_pfit_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	int pipe = crtc->pipe;

	if (crtc_state->pch_pfit.enabled) {
		/*
		 * IVB/HSW additionally require the pipe selection bits;
		 * both program a fixed medium 3x3 filter.
		 */
		if (IS_IVYBRIDGE(dev_priv) || IS_HASWELL(dev_priv))
			I915_WRITE(PF_CTL(pipe), PF_ENABLE | PF_FILTER_MED_3x3 |
				   PF_PIPE_SEL_IVB(pipe));
		else
			I915_WRITE(PF_CTL(pipe), PF_ENABLE | PF_FILTER_MED_3x3);
		I915_WRITE(PF_WIN_POS(pipe), crtc_state->pch_pfit.pos);
		I915_WRITE(PF_WIN_SZ(pipe), crtc_state->pch_pfit.size);
	}
}
5668
/*
 * hsw_enable_ips - enable Intermediate Pixel Storage (IPS)
 * @crtc_state: crtc state; no-op unless ips_enabled is set
 *
 * On BDW the enable goes through the pcode mailbox; on HSW the IPS_CTL
 * register is written directly and polled until the hardware acks.
 */
void hsw_enable_ips(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);

	if (!crtc_state->ips_enabled)
		return;

	/*
	 * IPS is only expected to be enabled while at least one
	 * non-cursor plane is active.
	 */
	WARN_ON(!(crtc_state->active_planes & ~BIT(PLANE_CURSOR)));

	if (IS_BROADWELL(dev_priv)) {
		/* BDW: request the transition via the pcode mailbox. */
		WARN_ON(sandybridge_pcode_write(dev_priv, DISPLAY_IPS_CONTROL,
						IPS_ENABLE | IPS_PCODE_CONTROL));
		/*
		 * NOTE(review): no readback/poll here on BDW — presumably
		 * the mailbox write is sufficient; confirm against bspec.
		 */
	} else {
		I915_WRITE(IPS_CTL, IPS_ENABLE);
		/*
		 * Poll up to 50ms for the hardware to acknowledge the
		 * enable in IPS_CTL.
		 */
		if (intel_de_wait_for_set(dev_priv, IPS_CTL, IPS_ENABLE, 50))
			DRM_ERROR("Timed out waiting for IPS enable\n");
	}
}
5704
/*
 * hsw_disable_ips - disable Intermediate Pixel Storage (IPS)
 * @crtc_state: crtc state; no-op unless ips_enabled is set
 *
 * BDW disables via the pcode mailbox and polls IPS_CTL for completion;
 * HSW clears IPS_CTL directly.  In both cases a vblank wait at the end
 * lets the change settle before further pipe updates.
 */
void hsw_disable_ips(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);

	if (!crtc_state->ips_enabled)
		return;

	if (IS_BROADWELL(dev_priv)) {
		WARN_ON(sandybridge_pcode_write(dev_priv, DISPLAY_IPS_CONTROL, 0));
		/*
		 * Poll up to 100ms for the hardware to report the
		 * disable in IPS_CTL.
		 */
		if (intel_de_wait_for_clear(dev_priv, IPS_CTL, IPS_ENABLE, 100))
			DRM_ERROR("Timed out waiting for IPS disable\n");
	} else {
		I915_WRITE(IPS_CTL, 0);
		POSTING_READ(IPS_CTL);
	}

	/* Wait a vblank for the disable to take effect. */
	intel_wait_for_vblank(dev_priv, crtc->pipe);
}
5731
5732 static void intel_crtc_dpms_overlay_disable(struct intel_crtc *intel_crtc)
5733 {
5734 if (intel_crtc->overlay) {
5735 struct drm_device *dev = intel_crtc->base.dev;
5736
5737 mutex_lock(&dev->struct_mutex);
5738 (void) intel_overlay_switch_off(intel_crtc->overlay);
5739 mutex_unlock(&dev->struct_mutex);
5740 }
5741
5742
5743
5744
5745 }
5746
5747
5748
5749
5750
5751
5752
5753
5754
5755
5756
5757
5758 static void
5759 intel_post_enable_primary(struct drm_crtc *crtc,
5760 const struct intel_crtc_state *new_crtc_state)
5761 {
5762 struct drm_device *dev = crtc->dev;
5763 struct drm_i915_private *dev_priv = to_i915(dev);
5764 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5765 int pipe = intel_crtc->pipe;
5766
5767
5768
5769
5770
5771
5772
5773
5774 if (IS_GEN(dev_priv, 2))
5775 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
5776
5777
5778 intel_check_cpu_fifo_underruns(dev_priv);
5779 intel_check_pch_fifo_underruns(dev_priv);
5780 }
5781
5782
5783 static void
5784 intel_pre_disable_primary_noatomic(struct drm_crtc *crtc)
5785 {
5786 struct drm_device *dev = crtc->dev;
5787 struct drm_i915_private *dev_priv = to_i915(dev);
5788 struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
5789 int pipe = intel_crtc->pipe;
5790
5791
5792
5793
5794
5795 if (IS_GEN(dev_priv, 2))
5796 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
5797
5798 hsw_disable_ips(to_intel_crtc_state(crtc->state));
5799
5800
5801
5802
5803
5804
5805
5806
5807
5808
5809 if (HAS_GMCH(dev_priv) &&
5810 intel_set_memory_cxsr(dev_priv, false))
5811 intel_wait_for_vblank(dev_priv, pipe);
5812 }
5813
5814 static bool hsw_pre_update_disable_ips(const struct intel_crtc_state *old_crtc_state,
5815 const struct intel_crtc_state *new_crtc_state)
5816 {
5817 struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
5818 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
5819
5820 if (!old_crtc_state->ips_enabled)
5821 return false;
5822
5823 if (needs_modeset(new_crtc_state))
5824 return true;
5825
5826
5827
5828
5829
5830
5831
5832 if (IS_HASWELL(dev_priv) &&
5833 (new_crtc_state->base.color_mgmt_changed ||
5834 new_crtc_state->update_pipe) &&
5835 new_crtc_state->gamma_mode == GAMMA_MODE_MODE_SPLIT)
5836 return true;
5837
5838 return !new_crtc_state->ips_enabled;
5839 }
5840
5841 static bool hsw_post_update_enable_ips(const struct intel_crtc_state *old_crtc_state,
5842 const struct intel_crtc_state *new_crtc_state)
5843 {
5844 struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
5845 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
5846
5847 if (!new_crtc_state->ips_enabled)
5848 return false;
5849
5850 if (needs_modeset(new_crtc_state))
5851 return true;
5852
5853
5854
5855
5856
5857
5858
5859 if (IS_HASWELL(dev_priv) &&
5860 (new_crtc_state->base.color_mgmt_changed ||
5861 new_crtc_state->update_pipe) &&
5862 new_crtc_state->gamma_mode == GAMMA_MODE_MODE_SPLIT)
5863 return true;
5864
5865
5866
5867
5868
5869 if (new_crtc_state->update_pipe &&
5870 old_crtc_state->base.adjusted_mode.private_flags & I915_MODE_FLAG_INHERITED)
5871 return true;
5872
5873 return !old_crtc_state->ips_enabled;
5874 }
5875
5876 static bool needs_nv12_wa(struct drm_i915_private *dev_priv,
5877 const struct intel_crtc_state *crtc_state)
5878 {
5879 if (!crtc_state->nv12_planes)
5880 return false;
5881
5882
5883 if (IS_GEN(dev_priv, 9) && !IS_GEMINILAKE(dev_priv))
5884 return true;
5885
5886 return false;
5887 }
5888
5889 static bool needs_scalerclk_wa(struct drm_i915_private *dev_priv,
5890 const struct intel_crtc_state *crtc_state)
5891 {
5892
5893 if (crtc_state->scaler_state.scaler_users > 0 && IS_ICELAKE(dev_priv))
5894 return true;
5895
5896 return false;
5897 }
5898
/*
 * intel_post_plane_update - per-crtc work after the plane update
 * @old_crtc_state: crtc state before this commit
 *
 * Flushes frontbuffer tracking, programs post-update watermarks,
 * re-enables IPS where allowed and drops the NV12/scaler-clock
 * workarounds once they are no longer needed.
 */
static void intel_post_plane_update(struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state = old_crtc_state->base.state;
	struct intel_crtc_state *pipe_config =
		intel_atomic_get_new_crtc_state(to_intel_atomic_state(state),
						crtc);
	struct drm_plane *primary = crtc->base.primary;
	struct drm_plane_state *old_primary_state =
		drm_atomic_get_old_plane_state(state, primary);

	intel_frontbuffer_flip(to_i915(crtc->base.dev), pipe_config->fb_bits);

	if (pipe_config->update_wm_post && pipe_config->base.active)
		intel_update_watermarks(crtc);

	/* Bring IPS back up if the new state allows it again. */
	if (hsw_post_update_enable_ips(old_crtc_state, pipe_config))
		hsw_enable_ips(pipe_config);

	if (old_primary_state) {
		struct drm_plane_state *new_primary_state =
			drm_atomic_get_new_plane_state(state, primary);

		intel_fbc_post_update(crtc);

		/* Primary plane just became visible (or full modeset). */
		if (new_primary_state->visible &&
		    (needs_modeset(pipe_config) ||
		     !old_primary_state->visible))
			intel_post_enable_primary(&crtc->base, pipe_config);
	}

	/* Drop the gen9 NV12 workaround once no longer needed. */
	if (needs_nv12_wa(dev_priv, old_crtc_state) &&
	    !needs_nv12_wa(dev_priv, pipe_config))
		skl_wa_827(dev_priv, crtc->pipe, false);

	/* Likewise for the ICL scaler clock gating workaround. */
	if (needs_scalerclk_wa(dev_priv, old_crtc_state) &&
	    !needs_scalerclk_wa(dev_priv, pipe_config))
		icl_wa_scalerclkgating(dev_priv, crtc->pipe, false);
}
5940
/*
 * intel_pre_plane_update - per-crtc work before the plane update
 * @old_crtc_state: crtc state before this commit
 * @pipe_config: new crtc state being committed
 *
 * Takes down IPS where required, mutes gen2 underrun reporting, arms the
 * NV12/scaler-clock workarounds, disables self-refresh/LP watermarks
 * where they would interfere, and programs pre-update watermarks.
 */
static void intel_pre_plane_update(struct intel_crtc_state *old_crtc_state,
				   struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state = old_crtc_state->base.state;
	struct drm_plane *primary = crtc->base.primary;
	struct drm_plane_state *old_primary_state =
		drm_atomic_get_old_plane_state(state, primary);
	bool modeset = needs_modeset(pipe_config);
	struct intel_atomic_state *intel_state =
		to_intel_atomic_state(state);

	/* IPS must be off before certain pipe/gamma updates. */
	if (hsw_pre_update_disable_ips(old_crtc_state, pipe_config))
		hsw_disable_ips(old_crtc_state);

	if (old_primary_state) {
		struct intel_plane_state *new_primary_state =
			intel_atomic_get_new_plane_state(intel_state,
							 to_intel_plane(primary));

		intel_fbc_pre_update(crtc, pipe_config, new_primary_state);
		/*
		 * Gen2 reports pipe underruns whenever all planes are
		 * disabled, so mute the reporting before the primary
		 * plane goes away.
		 */
		if (IS_GEN(dev_priv, 2) && old_primary_state->visible &&
		    (modeset || !new_primary_state->base.visible))
			intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, false);
	}

	/* Arm the gen9 NV12 workaround before it is first needed. */
	if (!needs_nv12_wa(dev_priv, old_crtc_state) &&
	    needs_nv12_wa(dev_priv, pipe_config))
		skl_wa_827(dev_priv, crtc->pipe, true);

	/* Likewise for the ICL scaler clock gating workaround. */
	if (!needs_scalerclk_wa(dev_priv, old_crtc_state) &&
	    needs_scalerclk_wa(dev_priv, pipe_config))
		icl_wa_scalerclkgating(dev_priv, crtc->pipe, true);

	/*
	 * gmch platforms: if self-refresh must be disabled for this
	 * update, wait a vblank so the disable has taken effect before
	 * the planes change.
	 */
	if (HAS_GMCH(dev_priv) && old_crtc_state->base.active &&
	    pipe_config->disable_cxsr && intel_set_memory_cxsr(dev_priv, false))
		intel_wait_for_vblank(dev_priv, crtc->pipe);

	/*
	 * ILK-style watermarks: if LP watermarks need disabling and the
	 * call actually changed them, wait a vblank for the change to
	 * latch before the plane update.
	 */
	if (pipe_config->disable_lp_wm && ilk_disable_lp_wm(dev) &&
	    old_crtc_state->base.active)
		intel_wait_for_vblank(dev_priv, crtc->pipe);

	/* On a full modeset the enable sequence handles watermarks. */
	if (needs_modeset(pipe_config))
		return;

	/*
	 * Plane-only update: program watermarks that are safe across
	 * the update now; the optimal watermarks are programmed in
	 * intel_post_plane_update().
	 */
	if (dev_priv->display.initial_watermarks != NULL)
		dev_priv->display.initial_watermarks(intel_state,
						     pipe_config);
	else if (pipe_config->update_wm_pre)
		intel_update_watermarks(crtc);
}
6034
6035 static void intel_crtc_disable_planes(struct intel_atomic_state *state,
6036 struct intel_crtc *crtc)
6037 {
6038 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6039 const struct intel_crtc_state *new_crtc_state =
6040 intel_atomic_get_new_crtc_state(state, crtc);
6041 unsigned int update_mask = new_crtc_state->update_planes;
6042 const struct intel_plane_state *old_plane_state;
6043 struct intel_plane *plane;
6044 unsigned fb_bits = 0;
6045 int i;
6046
6047 intel_crtc_dpms_overlay_disable(crtc);
6048
6049 for_each_old_intel_plane_in_state(state, plane, old_plane_state, i) {
6050 if (crtc->pipe != plane->pipe ||
6051 !(update_mask & BIT(plane->id)))
6052 continue;
6053
6054 intel_disable_plane(plane, new_crtc_state);
6055
6056 if (old_plane_state->base.visible)
6057 fb_bits |= plane->frontbuffer_bit;
6058 }
6059
6060 intel_frontbuffer_flip(dev_priv, fb_bits);
6061 }
6062
6063
6064
6065
6066
6067
6068
6069
6070
6071
/*
 * intel_connector_primary_encoder - get the primary encoder of a connector
 * @connector: the connector to query
 *
 * MST connectors hang off the base encoder of their digital port;
 * everything else uses the attached encoder (warned on if missing).
 */
static struct intel_encoder *
intel_connector_primary_encoder(struct intel_connector *connector)
{
	struct intel_encoder *encoder;

	if (connector->mst_port)
		return &dp_to_dig_port(connector->mst_port)->base;

	encoder = intel_attached_encoder(&connector->base);
	WARN_ON(!encoder);

	return encoder;
}
6085
6086 static bool
6087 intel_connector_needs_modeset(struct intel_atomic_state *state,
6088 const struct drm_connector_state *old_conn_state,
6089 const struct drm_connector_state *new_conn_state)
6090 {
6091 struct intel_crtc *old_crtc = old_conn_state->crtc ?
6092 to_intel_crtc(old_conn_state->crtc) : NULL;
6093 struct intel_crtc *new_crtc = new_conn_state->crtc ?
6094 to_intel_crtc(new_conn_state->crtc) : NULL;
6095
6096 return new_crtc != old_crtc ||
6097 (new_crtc &&
6098 needs_modeset(intel_atomic_get_new_crtc_state(state, new_crtc)));
6099 }
6100
6101 static void intel_encoders_update_prepare(struct intel_atomic_state *state)
6102 {
6103 struct drm_connector_state *old_conn_state;
6104 struct drm_connector_state *new_conn_state;
6105 struct drm_connector *conn;
6106 int i;
6107
6108 for_each_oldnew_connector_in_state(&state->base, conn,
6109 old_conn_state, new_conn_state, i) {
6110 struct intel_encoder *encoder;
6111 struct intel_crtc *crtc;
6112
6113 if (!intel_connector_needs_modeset(state,
6114 old_conn_state,
6115 new_conn_state))
6116 continue;
6117
6118 encoder = intel_connector_primary_encoder(to_intel_connector(conn));
6119 if (!encoder->update_prepare)
6120 continue;
6121
6122 crtc = new_conn_state->crtc ?
6123 to_intel_crtc(new_conn_state->crtc) : NULL;
6124 encoder->update_prepare(state, encoder, crtc);
6125 }
6126 }
6127
6128 static void intel_encoders_update_complete(struct intel_atomic_state *state)
6129 {
6130 struct drm_connector_state *old_conn_state;
6131 struct drm_connector_state *new_conn_state;
6132 struct drm_connector *conn;
6133 int i;
6134
6135 for_each_oldnew_connector_in_state(&state->base, conn,
6136 old_conn_state, new_conn_state, i) {
6137 struct intel_encoder *encoder;
6138 struct intel_crtc *crtc;
6139
6140 if (!intel_connector_needs_modeset(state,
6141 old_conn_state,
6142 new_conn_state))
6143 continue;
6144
6145 encoder = intel_connector_primary_encoder(to_intel_connector(conn));
6146 if (!encoder->update_complete)
6147 continue;
6148
6149 crtc = new_conn_state->crtc ?
6150 to_intel_crtc(new_conn_state->crtc) : NULL;
6151 encoder->update_complete(state, encoder, crtc);
6152 }
6153 }
6154
6155 static void intel_encoders_pre_pll_enable(struct intel_crtc *crtc,
6156 struct intel_crtc_state *crtc_state,
6157 struct intel_atomic_state *state)
6158 {
6159 struct drm_connector_state *conn_state;
6160 struct drm_connector *conn;
6161 int i;
6162
6163 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6164 struct intel_encoder *encoder =
6165 to_intel_encoder(conn_state->best_encoder);
6166
6167 if (conn_state->crtc != &crtc->base)
6168 continue;
6169
6170 if (encoder->pre_pll_enable)
6171 encoder->pre_pll_enable(encoder, crtc_state, conn_state);
6172 }
6173 }
6174
6175 static void intel_encoders_pre_enable(struct intel_crtc *crtc,
6176 struct intel_crtc_state *crtc_state,
6177 struct intel_atomic_state *state)
6178 {
6179 struct drm_connector_state *conn_state;
6180 struct drm_connector *conn;
6181 int i;
6182
6183 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6184 struct intel_encoder *encoder =
6185 to_intel_encoder(conn_state->best_encoder);
6186
6187 if (conn_state->crtc != &crtc->base)
6188 continue;
6189
6190 if (encoder->pre_enable)
6191 encoder->pre_enable(encoder, crtc_state, conn_state);
6192 }
6193 }
6194
6195 static void intel_encoders_enable(struct intel_crtc *crtc,
6196 struct intel_crtc_state *crtc_state,
6197 struct intel_atomic_state *state)
6198 {
6199 struct drm_connector_state *conn_state;
6200 struct drm_connector *conn;
6201 int i;
6202
6203 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6204 struct intel_encoder *encoder =
6205 to_intel_encoder(conn_state->best_encoder);
6206
6207 if (conn_state->crtc != &crtc->base)
6208 continue;
6209
6210 if (encoder->enable)
6211 encoder->enable(encoder, crtc_state, conn_state);
6212 intel_opregion_notify_encoder(encoder, true);
6213 }
6214 }
6215
6216 static void intel_encoders_disable(struct intel_crtc *crtc,
6217 struct intel_crtc_state *old_crtc_state,
6218 struct intel_atomic_state *state)
6219 {
6220 struct drm_connector_state *old_conn_state;
6221 struct drm_connector *conn;
6222 int i;
6223
6224 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6225 struct intel_encoder *encoder =
6226 to_intel_encoder(old_conn_state->best_encoder);
6227
6228 if (old_conn_state->crtc != &crtc->base)
6229 continue;
6230
6231 intel_opregion_notify_encoder(encoder, false);
6232 if (encoder->disable)
6233 encoder->disable(encoder, old_crtc_state, old_conn_state);
6234 }
6235 }
6236
6237 static void intel_encoders_post_disable(struct intel_crtc *crtc,
6238 struct intel_crtc_state *old_crtc_state,
6239 struct intel_atomic_state *state)
6240 {
6241 struct drm_connector_state *old_conn_state;
6242 struct drm_connector *conn;
6243 int i;
6244
6245 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6246 struct intel_encoder *encoder =
6247 to_intel_encoder(old_conn_state->best_encoder);
6248
6249 if (old_conn_state->crtc != &crtc->base)
6250 continue;
6251
6252 if (encoder->post_disable)
6253 encoder->post_disable(encoder, old_crtc_state, old_conn_state);
6254 }
6255 }
6256
6257 static void intel_encoders_post_pll_disable(struct intel_crtc *crtc,
6258 struct intel_crtc_state *old_crtc_state,
6259 struct intel_atomic_state *state)
6260 {
6261 struct drm_connector_state *old_conn_state;
6262 struct drm_connector *conn;
6263 int i;
6264
6265 for_each_old_connector_in_state(&state->base, conn, old_conn_state, i) {
6266 struct intel_encoder *encoder =
6267 to_intel_encoder(old_conn_state->best_encoder);
6268
6269 if (old_conn_state->crtc != &crtc->base)
6270 continue;
6271
6272 if (encoder->post_pll_disable)
6273 encoder->post_pll_disable(encoder, old_crtc_state, old_conn_state);
6274 }
6275 }
6276
6277 static void intel_encoders_update_pipe(struct intel_crtc *crtc,
6278 struct intel_crtc_state *crtc_state,
6279 struct intel_atomic_state *state)
6280 {
6281 struct drm_connector_state *conn_state;
6282 struct drm_connector *conn;
6283 int i;
6284
6285 for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
6286 struct intel_encoder *encoder =
6287 to_intel_encoder(conn_state->best_encoder);
6288
6289 if (conn_state->crtc != &crtc->base)
6290 continue;
6291
6292 if (encoder->update_pipe)
6293 encoder->update_pipe(encoder, crtc_state, conn_state);
6294 }
6295 }
6296
6297 static void intel_disable_primary_plane(const struct intel_crtc_state *crtc_state)
6298 {
6299 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
6300 struct intel_plane *plane = to_intel_plane(crtc->base.primary);
6301
6302 plane->disable_plane(plane, crtc_state);
6303 }
6304
/*
 * ironlake_crtc_enable - full crtc enable sequence for ILK-style pipes
 * @pipe_config: the crtc state being enabled
 * @state: the surrounding atomic state
 *
 * The sequence is order-critical: timings/M-N/PIPECONF are programmed
 * before the pipe is enabled, encoders are brought up around the pipe
 * enable, and FIFO underrun reporting is muted across the whole thing.
 */
static void ironlake_crtc_enable(struct intel_crtc_state *pipe_config,
				 struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = pipe_config->base.crtc;
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe;

	if (WARN_ON(intel_crtc->active))
		return;

	/*
	 * Suppress spurious underrun reports while the pipe and PCH
	 * transcoder come up; reporting is restored at the end.
	 */
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, false);

	if (pipe_config->has_pch_encoder)
		intel_prepare_shared_dpll(pipe_config);

	if (intel_crtc_has_dp_encoder(pipe_config))
		intel_dp_set_m_n(pipe_config, M1_N1);

	intel_set_pipe_timings(pipe_config);
	intel_set_pipe_src_size(pipe_config);

	if (pipe_config->has_pch_encoder) {
		intel_cpu_transcoder_set_m_n(pipe_config,
					     &pipe_config->fdi_m_n, NULL);
	}

	ironlake_set_pipeconf(pipe_config);

	intel_crtc->active = true;

	intel_encoders_pre_enable(intel_crtc, pipe_config, state);

	if (pipe_config->has_pch_encoder) {
		/* The FDI PLL feeds the PCH transcoder; enable it first. */
		ironlake_fdi_pll_enable(pipe_config);
	} else {
		assert_fdi_tx_disabled(dev_priv, pipe);
		assert_fdi_rx_disabled(dev_priv, pipe);
	}

	ironlake_pfit_enable(pipe_config);

	/* Load the LUTs while the pipe is still off but clocked. */
	intel_color_load_luts(pipe_config);
	intel_color_commit(pipe_config);

	/* The plane update re-enables the primary plane later. */
	intel_disable_primary_plane(pipe_config);

	if (dev_priv->display.initial_watermarks != NULL)
		dev_priv->display.initial_watermarks(state, pipe_config);
	intel_enable_pipe(pipe_config);

	if (pipe_config->has_pch_encoder)
		ironlake_pch_enable(state, pipe_config);

	assert_vblank_disabled(crtc);
	intel_crtc_vblank_on(pipe_config);

	intel_encoders_enable(intel_crtc, pipe_config, state);

	if (HAS_PCH_CPT(dev_priv))
		cpt_verify_modeset(dev, intel_crtc->pipe);

	/*
	 * Wait two vblanks on the PCH path so underruns caused by the
	 * enable sequence itself drain before reporting is restored.
	 * NOTE(review): the double wait looks deliberate — confirm
	 * against the original workaround rationale.
	 */
	if (pipe_config->has_pch_encoder) {
		intel_wait_for_vblank(dev_priv, pipe);
		intel_wait_for_vblank(dev_priv, pipe);
	}
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, true);
}
6399
6400
6401 static bool hsw_crtc_supports_ips(struct intel_crtc *crtc)
6402 {
6403 return HAS_IPS(to_i915(crtc->base.dev)) && crtc->pipe == PIPE_A;
6404 }
6405
6406 static void glk_pipe_scaler_clock_gating_wa(struct drm_i915_private *dev_priv,
6407 enum pipe pipe, bool apply)
6408 {
6409 u32 val = I915_READ(CLKGATE_DIS_PSL(pipe));
6410 u32 mask = DPF_GATING_DIS | DPF_RAM_GATING_DIS | DPFR_GATING_DIS;
6411
6412 if (apply)
6413 val |= mask;
6414 else
6415 val &= ~mask;
6416
6417 I915_WRITE(CLKGATE_DIS_PSL(pipe), val);
6418 }
6419
6420 static void icl_pipe_mbus_enable(struct intel_crtc *crtc)
6421 {
6422 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6423 enum pipe pipe = crtc->pipe;
6424 u32 val;
6425
6426 val = MBUS_DBOX_A_CREDIT(2);
6427
6428 if (INTEL_GEN(dev_priv) >= 12) {
6429 val |= MBUS_DBOX_BW_CREDIT(2);
6430 val |= MBUS_DBOX_B_CREDIT(12);
6431 } else {
6432 val |= MBUS_DBOX_BW_CREDIT(1);
6433 val |= MBUS_DBOX_B_CREDIT(8);
6434 }
6435
6436 I915_WRITE(PIPE_MBUS_DBOX_CTL(pipe), val);
6437 }
6438
/*
 * haswell_crtc_enable - full crtc enable sequence for HSW+ (DDI) pipes
 * @pipe_config: the crtc state being enabled
 * @state: the surrounding atomic state
 *
 * Order-critical: PLL and transcoder setup happen before the pipe
 * enable, and DSI transcoders skip the steps their encoder handles
 * itself.
 */
static void haswell_crtc_enable(struct intel_crtc_state *pipe_config,
				struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = pipe_config->base.crtc;
	struct drm_i915_private *dev_priv = to_i915(crtc->dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe, hsw_workaround_pipe;
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	bool psl_clkgate_wa;

	if (WARN_ON(intel_crtc->active))
		return;

	intel_encoders_pre_pll_enable(intel_crtc, pipe_config, state);

	if (pipe_config->shared_dpll)
		intel_enable_shared_dpll(pipe_config);

	intel_encoders_pre_enable(intel_crtc, pipe_config, state);

	if (intel_crtc_has_dp_encoder(pipe_config))
		intel_dp_set_m_n(pipe_config, M1_N1);

	/* DSI transcoders program their own timings via the encoder. */
	if (!transcoder_is_dsi(cpu_transcoder))
		intel_set_pipe_timings(pipe_config);

	intel_set_pipe_src_size(pipe_config);

	if (cpu_transcoder != TRANSCODER_EDP &&
	    !transcoder_is_dsi(cpu_transcoder)) {
		I915_WRITE(PIPE_MULT(cpu_transcoder),
			   pipe_config->pixel_multiplier - 1);
	}

	if (pipe_config->has_pch_encoder) {
		intel_cpu_transcoder_set_m_n(pipe_config,
					     &pipe_config->fdi_m_n, NULL);
	}

	if (!transcoder_is_dsi(cpu_transcoder))
		haswell_set_pipeconf(pipe_config);

	if (INTEL_GEN(dev_priv) >= 9 || IS_BROADWELL(dev_priv))
		bdw_set_pipemisc(pipe_config);

	intel_crtc->active = true;

	/*
	 * GLK/CNL with the panel fitter active: disable scaler clock
	 * gating for the duration of the enable (restored after the
	 * first vblank below).
	 */
	psl_clkgate_wa = (IS_GEMINILAKE(dev_priv) || IS_CANNONLAKE(dev_priv)) &&
			 pipe_config->pch_pfit.enabled;
	if (psl_clkgate_wa)
		glk_pipe_scaler_clock_gating_wa(dev_priv, pipe, true);

	if (INTEL_GEN(dev_priv) >= 9)
		skylake_pfit_enable(pipe_config);
	else
		ironlake_pfit_enable(pipe_config);

	/* Load the LUTs while the pipe is still off but clocked. */
	intel_color_load_luts(pipe_config);
	intel_color_commit(pipe_config);

	/* Pre-gen9: the plane update re-enables the primary later. */
	if (INTEL_GEN(dev_priv) < 9)
		intel_disable_primary_plane(pipe_config);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_set_pipe_chicken(intel_crtc);

	intel_ddi_set_pipe_settings(pipe_config);
	if (!transcoder_is_dsi(cpu_transcoder))
		intel_ddi_enable_transcoder_func(pipe_config);

	if (dev_priv->display.initial_watermarks != NULL)
		dev_priv->display.initial_watermarks(state, pipe_config);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_pipe_mbus_enable(intel_crtc);

	/* DSI pipes are enabled by the encoder itself. */
	if (!transcoder_is_dsi(cpu_transcoder))
		intel_enable_pipe(pipe_config);

	if (pipe_config->has_pch_encoder)
		lpt_pch_enable(state, pipe_config);

	if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST))
		intel_ddi_set_vc_payload_alloc(pipe_config, true);

	assert_vblank_disabled(crtc);
	intel_crtc_vblank_on(pipe_config);

	intel_encoders_enable(intel_crtc, pipe_config, state);

	/* Restore scaler clock gating after the first vblank. */
	if (psl_clkgate_wa) {
		intel_wait_for_vblank(dev_priv, pipe);
		glk_pipe_scaler_clock_gating_wa(dev_priv, pipe, false);
	}

	/*
	 * HSW workaround: enabling this pipe may require extra vblank
	 * waits on another pipe recorded in hsw_workaround_pipe.
	 */
	hsw_workaround_pipe = pipe_config->hsw_workaround_pipe;
	if (IS_HASWELL(dev_priv) && hsw_workaround_pipe != INVALID_PIPE) {
		intel_wait_for_vblank(dev_priv, hsw_workaround_pipe);
		intel_wait_for_vblank(dev_priv, hsw_workaround_pipe);
	}
}
6548
/*
 * ironlake_pfit_disable - disable the ILK-style panel fitter
 * @old_crtc_state: the crtc state being torn down
 *
 * Only touches the registers if the panel fitter was actually in use.
 */
static void ironlake_pfit_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (old_crtc_state->pch_pfit.enabled) {
		I915_WRITE(PF_CTL(pipe), 0);
		I915_WRITE(PF_WIN_POS(pipe), 0);
		I915_WRITE(PF_WIN_SZ(pipe), 0);
	}
}
6563
/*
 * ironlake_crtc_disable - full crtc disable sequence for ILK-style pipes
 * @old_crtc_state: the crtc state being torn down
 * @state: the surrounding atomic state
 *
 * Mirrors ironlake_crtc_enable() in reverse; FIFO underrun reporting is
 * muted across the teardown and restored at the end.
 */
static void ironlake_crtc_disable(struct intel_crtc_state *old_crtc_state,
				  struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = old_crtc_state->base.crtc;
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe;

	/* Suppress spurious underrun reports during teardown. */
	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, false);

	intel_encoders_disable(intel_crtc, old_crtc_state, state);

	drm_crtc_vblank_off(crtc);
	assert_vblank_disabled(crtc);

	intel_disable_pipe(old_crtc_state);

	ironlake_pfit_disable(old_crtc_state);

	if (old_crtc_state->has_pch_encoder)
		ironlake_fdi_disable(crtc);

	intel_encoders_post_disable(intel_crtc, old_crtc_state, state);

	if (old_crtc_state->has_pch_encoder) {
		ironlake_disable_pch_transcoder(dev_priv, pipe);

		if (HAS_PCH_CPT(dev_priv)) {
			i915_reg_t reg;
			u32 temp;

			/* Disable the PCH DP output and port selection. */
			reg = TRANS_DP_CTL(pipe);
			temp = I915_READ(reg);
			temp &= ~(TRANS_DP_OUTPUT_ENABLE |
				  TRANS_DP_PORT_SEL_MASK);
			temp |= TRANS_DP_PORT_SEL_NONE;
			I915_WRITE(reg, temp);

			/* Detach the DPLL from this transcoder. */
			temp = I915_READ(PCH_DPLL_SEL);
			temp &= ~(TRANS_DPLL_ENABLE(pipe) | TRANS_DPLLB_SEL(pipe));
			I915_WRITE(PCH_DPLL_SEL, temp);
		}

		ironlake_fdi_pll_disable(intel_crtc);
	}

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);
	intel_set_pch_fifo_underrun_reporting(dev_priv, pipe, true);
}
6622
/*
 * haswell_crtc_disable - full crtc disable sequence for HSW+ (DDI) pipes
 * @old_crtc_state: the crtc state being torn down
 * @state: the surrounding atomic state
 *
 * DSI transcoders skip the pipe/transcoder steps handled by the DSI
 * encoder's own disable hooks.
 */
static void haswell_crtc_disable(struct intel_crtc_state *old_crtc_state,
				 struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = old_crtc_state->base.crtc;
	struct drm_i915_private *dev_priv = to_i915(crtc->dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	intel_encoders_disable(intel_crtc, old_crtc_state, state);

	drm_crtc_vblank_off(crtc);
	assert_vblank_disabled(crtc);

	/* DSI pipes are disabled by the encoder itself. */
	if (!transcoder_is_dsi(cpu_transcoder))
		intel_disable_pipe(old_crtc_state);

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST))
		intel_ddi_set_vc_payload_alloc(old_crtc_state, false);

	if (!transcoder_is_dsi(cpu_transcoder))
		intel_ddi_disable_transcoder_func(old_crtc_state);

	intel_dsc_disable(old_crtc_state);

	if (INTEL_GEN(dev_priv) >= 9)
		skylake_scaler_disable(intel_crtc);
	else
		ironlake_pfit_disable(old_crtc_state);

	intel_encoders_post_disable(intel_crtc, old_crtc_state, state);

	intel_encoders_post_pll_disable(intel_crtc, old_crtc_state, state);
}
6657
/*
 * i9xx_pfit_enable - enable the gmch-style panel fitter
 * @crtc_state: crtc state carrying the gmch_pfit configuration
 *
 * No-op when the state carries no pfit control value.
 */
static void i9xx_pfit_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!crtc_state->gmch_pfit.control)
		return;

	/*
	 * The panel fitter may only be reprogrammed while the pipe is
	 * off; assert neither it nor the pipe is currently live.
	 */
	WARN_ON(I915_READ(PFIT_CONTROL) & PFIT_ENABLE);
	assert_pipe_disabled(dev_priv, crtc->pipe);

	I915_WRITE(PFIT_PGM_RATIOS, crtc_state->gmch_pfit.pgm_ratios);
	I915_WRITE(PFIT_CONTROL, crtc_state->gmch_pfit.control);

	/* Border color in case we don't scale up to the full screen size. */
	I915_WRITE(BCLRPAT(crtc->pipe), 0);
}
6680
6681 bool intel_phy_is_combo(struct drm_i915_private *dev_priv, enum phy phy)
6682 {
6683 if (phy == PHY_NONE)
6684 return false;
6685
6686 if (IS_ELKHARTLAKE(dev_priv))
6687 return phy <= PHY_C;
6688
6689 if (INTEL_GEN(dev_priv) >= 11)
6690 return phy <= PHY_B;
6691
6692 return false;
6693 }
6694
6695 bool intel_phy_is_tc(struct drm_i915_private *dev_priv, enum phy phy)
6696 {
6697 if (INTEL_GEN(dev_priv) >= 12)
6698 return phy >= PHY_D && phy <= PHY_I;
6699
6700 if (INTEL_GEN(dev_priv) >= 11 && !IS_ELKHARTLAKE(dev_priv))
6701 return phy >= PHY_C && phy <= PHY_F;
6702
6703 return false;
6704 }
6705
6706 enum phy intel_port_to_phy(struct drm_i915_private *i915, enum port port)
6707 {
6708 if (IS_ELKHARTLAKE(i915) && port == PORT_D)
6709 return PHY_A;
6710
6711 return (enum phy)port;
6712 }
6713
6714 enum tc_port intel_port_to_tc(struct drm_i915_private *dev_priv, enum port port)
6715 {
6716 if (!intel_phy_is_tc(dev_priv, intel_port_to_phy(dev_priv, port)))
6717 return PORT_TC_NONE;
6718
6719 if (INTEL_GEN(dev_priv) >= 12)
6720 return port - PORT_D;
6721
6722 return port - PORT_C;
6723 }
6724
/*
 * Map a DDI port to the power domain gating its lanes. Unknown ports
 * are flagged with MISSING_CASE and fall back to the generic
 * POWER_DOMAIN_PORT_OTHER domain.
 */
enum intel_display_power_domain intel_port_to_power_domain(enum port port)
{
	switch (port) {
	case PORT_A:
		return POWER_DOMAIN_PORT_DDI_A_LANES;
	case PORT_B:
		return POWER_DOMAIN_PORT_DDI_B_LANES;
	case PORT_C:
		return POWER_DOMAIN_PORT_DDI_C_LANES;
	case PORT_D:
		return POWER_DOMAIN_PORT_DDI_D_LANES;
	case PORT_E:
		return POWER_DOMAIN_PORT_DDI_E_LANES;
	case PORT_F:
		return POWER_DOMAIN_PORT_DDI_F_LANES;
	default:
		MISSING_CASE(port);
		return POWER_DOMAIN_PORT_OTHER;
	}
}
6745
/*
 * Return the power domain needed for AUX transactions on @dig_port.
 * Type-C ports in Thunderbolt-alt mode use the dedicated TBT AUX
 * domains; all other ports use the regular per-channel AUX domains.
 * Unknown channels are flagged via MISSING_CASE and map to the first
 * domain of the relevant group.
 */
enum intel_display_power_domain
intel_aux_power_domain(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port);

	if (intel_phy_is_tc(dev_priv, phy) &&
	    dig_port->tc_mode == TC_PORT_TBT_ALT) {
		switch (dig_port->aux_ch) {
		case AUX_CH_C:
			return POWER_DOMAIN_AUX_TBT1;
		case AUX_CH_D:
			return POWER_DOMAIN_AUX_TBT2;
		case AUX_CH_E:
			return POWER_DOMAIN_AUX_TBT3;
		case AUX_CH_F:
			return POWER_DOMAIN_AUX_TBT4;
		default:
			MISSING_CASE(dig_port->aux_ch);
			return POWER_DOMAIN_AUX_TBT1;
		}
	}

	switch (dig_port->aux_ch) {
	case AUX_CH_A:
		return POWER_DOMAIN_AUX_A;
	case AUX_CH_B:
		return POWER_DOMAIN_AUX_B;
	case AUX_CH_C:
		return POWER_DOMAIN_AUX_C;
	case AUX_CH_D:
		return POWER_DOMAIN_AUX_D;
	case AUX_CH_E:
		return POWER_DOMAIN_AUX_E;
	case AUX_CH_F:
		return POWER_DOMAIN_AUX_F;
	default:
		MISSING_CASE(dig_port->aux_ch);
		return POWER_DOMAIN_AUX_A;
	}
}
6787
6788 static u64 get_crtc_power_domains(struct intel_crtc_state *crtc_state)
6789 {
6790 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
6791 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6792 struct drm_encoder *encoder;
6793 enum pipe pipe = crtc->pipe;
6794 u64 mask;
6795 enum transcoder transcoder = crtc_state->cpu_transcoder;
6796
6797 if (!crtc_state->base.active)
6798 return 0;
6799
6800 mask = BIT_ULL(POWER_DOMAIN_PIPE(pipe));
6801 mask |= BIT_ULL(POWER_DOMAIN_TRANSCODER(transcoder));
6802 if (crtc_state->pch_pfit.enabled ||
6803 crtc_state->pch_pfit.force_thru)
6804 mask |= BIT_ULL(POWER_DOMAIN_PIPE_PANEL_FITTER(pipe));
6805
6806 drm_for_each_encoder_mask(encoder, &dev_priv->drm,
6807 crtc_state->base.encoder_mask) {
6808 struct intel_encoder *intel_encoder = to_intel_encoder(encoder);
6809
6810 mask |= BIT_ULL(intel_encoder->power_domain);
6811 }
6812
6813 if (HAS_DDI(dev_priv) && crtc_state->has_audio)
6814 mask |= BIT_ULL(POWER_DOMAIN_AUDIO);
6815
6816 if (crtc_state->shared_dpll)
6817 mask |= BIT_ULL(POWER_DOMAIN_DISPLAY_CORE);
6818
6819 return mask;
6820 }
6821
6822 static u64
6823 modeset_get_crtc_power_domains(struct intel_crtc_state *crtc_state)
6824 {
6825 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
6826 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
6827 enum intel_display_power_domain domain;
6828 u64 domains, new_domains, old_domains;
6829
6830 old_domains = crtc->enabled_power_domains;
6831 crtc->enabled_power_domains = new_domains =
6832 get_crtc_power_domains(crtc_state);
6833
6834 domains = new_domains & ~old_domains;
6835
6836 for_each_power_domain(domain, domains)
6837 intel_display_power_get(dev_priv, domain);
6838
6839 return old_domains & ~new_domains;
6840 }
6841
/*
 * Drop one power reference for each domain bit set in @domains;
 * counterpart to modeset_get_crtc_power_domains().
 */
static void modeset_put_power_domains(struct drm_i915_private *dev_priv,
				      u64 domains)
{
	enum intel_display_power_domain domain;

	for_each_power_domain(domain, domains)
		intel_display_power_put_unchecked(dev_priv, domain);
}
6850
/*
 * Full modeset enable path for VLV/CHV pipes. The ordering below is
 * hardware-mandated: timings/pipeconf first, then PLL, encoders,
 * panel fitter, LUTs and watermarks, and finally the pipe itself.
 */
static void valleyview_crtc_enable(struct intel_crtc_state *pipe_config,
				   struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = pipe_config->base.crtc;
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe;

	if (WARN_ON(intel_crtc->active))
		return;

	if (intel_crtc_has_dp_encoder(pipe_config))
		intel_dp_set_m_n(pipe_config, M1_N1);

	intel_set_pipe_timings(pipe_config);
	intel_set_pipe_src_size(pipe_config);

	/* CHV pipe B sprite blending setup; legacy mode, black canvas. */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
		I915_WRITE(CHV_BLEND(pipe), CHV_BLEND_LEGACY);
		I915_WRITE(CHV_CANVAS(pipe), 0);
	}

	i9xx_set_pipeconf(pipe_config);

	intel_crtc->active = true;

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	intel_encoders_pre_pll_enable(intel_crtc, pipe_config, state);

	if (IS_CHERRYVIEW(dev_priv)) {
		chv_prepare_pll(intel_crtc, pipe_config);
		chv_enable_pll(intel_crtc, pipe_config);
	} else {
		vlv_prepare_pll(intel_crtc, pipe_config);
		vlv_enable_pll(intel_crtc, pipe_config);
	}

	intel_encoders_pre_enable(intel_crtc, pipe_config, state);

	i9xx_pfit_enable(pipe_config);

	intel_color_load_luts(pipe_config);
	intel_color_commit(pipe_config);
	/* update DSPCNTR to configure gamma for pipe bottom color */
	intel_disable_primary_plane(pipe_config);

	dev_priv->display.initial_watermarks(state, pipe_config);
	intel_enable_pipe(pipe_config);

	assert_vblank_disabled(crtc);
	intel_crtc_vblank_on(pipe_config);

	intel_encoders_enable(intel_crtc, pipe_config, state);
}
6907
/* Write the precomputed FP0/FP1 divider values for this pipe's DPLL. */
static void i9xx_set_pll_dividers(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	I915_WRITE(FP0(crtc->pipe), crtc_state->dpll_hw_state.fp0);
	I915_WRITE(FP1(crtc->pipe), crtc_state->dpll_hw_state.fp1);
}
6916
/*
 * Full modeset enable path for gen2-4 GMCH pipes: PLL dividers,
 * timings/pipeconf, PLL, panel fitter, LUTs and watermarks, then
 * the pipe itself and finally the encoders.
 */
static void i9xx_crtc_enable(struct intel_crtc_state *pipe_config,
			     struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = pipe_config->base.crtc;
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	enum pipe pipe = intel_crtc->pipe;

	if (WARN_ON(intel_crtc->active))
		return;

	i9xx_set_pll_dividers(pipe_config);

	if (intel_crtc_has_dp_encoder(pipe_config))
		intel_dp_set_m_n(pipe_config, M1_N1);

	intel_set_pipe_timings(pipe_config);
	intel_set_pipe_src_size(pipe_config);

	i9xx_set_pipeconf(pipe_config);

	intel_crtc->active = true;

	/* Underrun reporting is only enabled on gen3+ here. */
	if (!IS_GEN(dev_priv, 2))
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	intel_encoders_pre_enable(intel_crtc, pipe_config, state);

	i9xx_enable_pll(intel_crtc, pipe_config);

	i9xx_pfit_enable(pipe_config);

	intel_color_load_luts(pipe_config);
	intel_color_commit(pipe_config);
	/* update DSPCNTR to configure gamma for pipe bottom color */
	intel_disable_primary_plane(pipe_config);

	if (dev_priv->display.initial_watermarks != NULL)
		dev_priv->display.initial_watermarks(state,
						     pipe_config);
	else
		intel_update_watermarks(intel_crtc);
	intel_enable_pipe(pipe_config);

	assert_vblank_disabled(crtc);
	intel_crtc_vblank_on(pipe_config);

	intel_encoders_enable(intel_crtc, pipe_config, state);
}
6967
/*
 * Turn off the GMCH panel fitter. Like the enable path, this may only
 * run while the pipe is disabled.
 */
static void i9xx_pfit_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (!old_crtc_state->gmch_pfit.control)
		return;

	assert_pipe_disabled(dev_priv, crtc->pipe);

	DRM_DEBUG_KMS("disabling pfit, current: 0x%08x\n",
		      I915_READ(PFIT_CONTROL));
	I915_WRITE(PFIT_CONTROL, 0);
}
6982
/*
 * Full modeset disable path for gen2-4 / VLV / CHV pipes: encoders,
 * pipe, panel fitter, then the PLL, in that order.
 */
static void i9xx_crtc_disable(struct intel_crtc_state *old_crtc_state,
			      struct intel_atomic_state *state)
{
	struct drm_crtc *crtc = old_crtc_state->base.crtc;
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pipe = intel_crtc->pipe;

	/*
	 * On gen2 planes are double buffered but the pipe isn't, so we must
	 * wait for planes to fully turn off before we end the pipe.
	 */
	if (IS_GEN(dev_priv, 2))
		intel_wait_for_vblank(dev_priv, pipe);

	intel_encoders_disable(intel_crtc, old_crtc_state, state);

	drm_crtc_vblank_off(crtc);
	assert_vblank_disabled(crtc);

	intel_disable_pipe(old_crtc_state);

	i9xx_pfit_disable(old_crtc_state);

	intel_encoders_post_disable(intel_crtc, old_crtc_state, state);

	/* The DPLL is only torn down for non-DSI outputs. */
	if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DSI)) {
		if (IS_CHERRYVIEW(dev_priv))
			chv_disable_pll(dev_priv, pipe);
		else if (IS_VALLEYVIEW(dev_priv))
			vlv_disable_pll(dev_priv, pipe);
		else
			i9xx_disable_pll(old_crtc_state);
	}

	intel_encoders_post_pll_disable(intel_crtc, old_crtc_state, state);

	if (!IS_GEN(dev_priv, 2))
		intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, false);

	if (!dev_priv->display.initial_watermarks)
		intel_update_watermarks(intel_crtc);

	/* NOTE(review): i830 apparently needs the pipe left running here. */
	if (IS_I830(dev_priv))
		i830_enable_pipe(dev_priv, pipe);
}
7031
/*
 * Force a CRTC off outside of a full atomic commit (used during HW
 * state sanitization/takeover): disable its planes and hardware, then
 * scrub the software state (drm state, watermarks, power domains,
 * cdclk/voltage bookkeeping and bandwidth state) to match.
 */
static void intel_crtc_disable_noatomic(struct drm_crtc *crtc,
					struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_encoder *encoder;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->dev);
	struct intel_bw_state *bw_state =
		to_intel_bw_state(dev_priv->bw_obj.state);
	enum intel_display_power_domain domain;
	struct intel_plane *plane;
	u64 domains;
	struct drm_atomic_state *state;
	struct intel_crtc_state *crtc_state;
	int ret;

	if (!intel_crtc->active)
		return;

	/* Turn off every plane that is currently visible on this CRTC. */
	for_each_intel_plane_on_crtc(&dev_priv->drm, intel_crtc, plane) {
		const struct intel_plane_state *plane_state =
			to_intel_plane_state(plane->base.state);

		if (plane_state->base.visible)
			intel_plane_disable_noatomic(intel_crtc, plane);
	}

	state = drm_atomic_state_alloc(crtc->dev);
	if (!state) {
		DRM_DEBUG_KMS("failed to disable [CRTC:%d:%s], out of memory",
			      crtc->base.id, crtc->name);
		return;
	}

	state->acquire_ctx = ctx;

	/* Everything's already locked, -EDEADLK can't happen. */
	crtc_state = intel_atomic_get_crtc_state(state, intel_crtc);
	ret = drm_atomic_add_affected_connectors(state, crtc);

	WARN_ON(IS_ERR(crtc_state) || ret);

	dev_priv->display.crtc_disable(crtc_state, to_intel_atomic_state(state));

	drm_atomic_state_put(state);

	DRM_DEBUG_KMS("[CRTC:%d:%s] hw state adjusted, was enabled, now disabled\n",
		      crtc->base.id, crtc->name);

	/* Clear out the software state so it matches the disabled HW. */
	WARN_ON(drm_atomic_set_mode_for_crtc(crtc->state, NULL) < 0);
	crtc->state->active = false;
	intel_crtc->active = false;
	crtc->enabled = false;
	crtc->state->connector_mask = 0;
	crtc->state->encoder_mask = 0;

	for_each_encoder_on_crtc(crtc->dev, crtc, encoder)
		encoder->base.crtc = NULL;

	intel_fbc_disable(intel_crtc);
	intel_update_watermarks(intel_crtc);
	intel_disable_shared_dpll(to_intel_crtc_state(crtc->state));

	/* Drop all power references the CRTC was holding. */
	domains = intel_crtc->enabled_power_domains;
	for_each_power_domain(domain, domains)
		intel_display_power_put_unchecked(dev_priv, domain);
	intel_crtc->enabled_power_domains = 0;

	dev_priv->active_crtcs &= ~(1 << intel_crtc->pipe);
	dev_priv->min_cdclk[intel_crtc->pipe] = 0;
	dev_priv->min_voltage_level[intel_crtc->pipe] = 0;

	bw_state->data_rate[intel_crtc->pipe] = 0;
	bw_state->num_active_planes[intel_crtc->pipe] = 0;
}
7106
7107
7108
7109
7110
7111 int intel_display_suspend(struct drm_device *dev)
7112 {
7113 struct drm_i915_private *dev_priv = to_i915(dev);
7114 struct drm_atomic_state *state;
7115 int ret;
7116
7117 state = drm_atomic_helper_suspend(dev);
7118 ret = PTR_ERR_OR_ZERO(state);
7119 if (ret)
7120 DRM_ERROR("Suspending crtc's failed with %i\n", ret);
7121 else
7122 dev_priv->modeset_restore_state = state;
7123 return ret;
7124 }
7125
/* Generic encoder destructor: tear down the DRM object, then free it. */
void intel_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_encoder *intel_encoder = to_intel_encoder(encoder);

	drm_encoder_cleanup(encoder);
	kfree(intel_encoder);
}
7133
7134
7135
/*
 * Cross-check a connector's hardware state against its atomic software
 * state, emitting I915_STATE_WARNs on any mismatch. @crtc_state may be
 * NULL when the connector is not attached to a CRTC.
 */
static void intel_connector_verify_state(struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct intel_connector *connector = to_intel_connector(conn_state->connector);

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n",
		      connector->base.base.id,
		      connector->base.name);

	if (connector->get_hw_state(connector)) {
		struct intel_encoder *encoder = connector->encoder;

		I915_STATE_WARN(!crtc_state,
			 "connector enabled without attached crtc\n");

		if (!crtc_state)
			return;

		I915_STATE_WARN(!crtc_state->base.active,
		      "connector is active, but attached crtc isn't\n");

		/* MST encoders are bound to a crtc, not to a connector. */
		if (!encoder || encoder->type == INTEL_OUTPUT_DP_MST)
			return;

		I915_STATE_WARN(conn_state->best_encoder != &encoder->base,
			"atomic encoder doesn't match attached encoder\n");

		I915_STATE_WARN(conn_state->crtc != encoder->base.crtc,
			"attached encoder crtc differs from connector crtc\n");
	} else {
		I915_STATE_WARN(crtc_state && crtc_state->base.active,
			"attached crtc is active, but connector isn't\n");
		I915_STATE_WARN(!crtc_state && conn_state->best_encoder,
			"best encoder set without crtc!\n");
	}
}
7172
7173 static int pipe_required_fdi_lanes(struct intel_crtc_state *crtc_state)
7174 {
7175 if (crtc_state->base.enable && crtc_state->has_pch_encoder)
7176 return crtc_state->fdi_lanes;
7177
7178 return 0;
7179 }
7180
/*
 * Validate the FDI lane allocation for @pipe against platform limits
 * and the lanes used by the other pipes (B and C share FDI bandwidth
 * on 3-pipe parts). Returns 0 when valid, -EINVAL when the config
 * cannot work, or a propagated error from state acquisition.
 */
static int ironlake_check_fdi_lanes(struct drm_device *dev, enum pipe pipe,
				    struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state = pipe_config->base.state;
	struct intel_crtc *other_crtc;
	struct intel_crtc_state *other_crtc_state;

	DRM_DEBUG_KMS("checking fdi config on pipe %c, lanes %i\n",
		      pipe_name(pipe), pipe_config->fdi_lanes);
	if (pipe_config->fdi_lanes > 4) {
		DRM_DEBUG_KMS("invalid fdi lane config on pipe %c: %i lanes\n",
			      pipe_name(pipe), pipe_config->fdi_lanes);
		return -EINVAL;
	}

	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		if (pipe_config->fdi_lanes > 2) {
			DRM_DEBUG_KMS("only 2 lanes on haswell, required: %i lanes\n",
				      pipe_config->fdi_lanes);
			return -EINVAL;
		} else {
			return 0;
		}
	}

	if (INTEL_INFO(dev_priv)->num_pipes == 2)
		return 0;

	/* Ivybridge 3 pipe is really complicated */
	switch (pipe) {
	case PIPE_A:
		return 0;
	case PIPE_B:
		if (pipe_config->fdi_lanes <= 2)
			return 0;

		/* B wants >2 lanes: only allowed when pipe C uses none. */
		other_crtc = intel_get_crtc_for_pipe(dev_priv, PIPE_C);
		other_crtc_state =
			intel_atomic_get_crtc_state(state, other_crtc);
		if (IS_ERR(other_crtc_state))
			return PTR_ERR(other_crtc_state);

		if (pipe_required_fdi_lanes(other_crtc_state) > 0) {
			DRM_DEBUG_KMS("invalid shared fdi lane config on pipe %c: %i lanes\n",
				      pipe_name(pipe), pipe_config->fdi_lanes);
			return -EINVAL;
		}
		return 0;
	case PIPE_C:
		if (pipe_config->fdi_lanes > 2) {
			DRM_DEBUG_KMS("only 2 lanes on pipe %c: required %i lanes\n",
				      pipe_name(pipe), pipe_config->fdi_lanes);
			return -EINVAL;
		}

		/* C can only run at all if pipe B uses at most 2 lanes. */
		other_crtc = intel_get_crtc_for_pipe(dev_priv, PIPE_B);
		other_crtc_state =
			intel_atomic_get_crtc_state(state, other_crtc);
		if (IS_ERR(other_crtc_state))
			return PTR_ERR(other_crtc_state);

		if (pipe_required_fdi_lanes(other_crtc_state) > 2) {
			DRM_DEBUG_KMS("fdi link B uses too many lanes to enable link C\n");
			return -EINVAL;
		}
		return 0;
	default:
		BUG();
	}
}
7252
#define RETRY 1
/*
 * Compute the FDI link configuration (lane count and M/N values) for a
 * PCH-attached pipe. If the requested bpp doesn't fit the available
 * lanes, bpp is reduced in 2-bit-per-component steps (down to 18bpp)
 * and RETRY is returned so the caller re-runs the config computation.
 */
static int ironlake_fdi_compute_config(struct intel_crtc *intel_crtc,
				       struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = intel_crtc->base.dev;
	const struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;
	int lane, link_bw, fdi_dotclock, ret;
	bool needs_recompute = false;

retry:
	/* FDI link rate in kHz for the current platform configuration. */
	link_bw = intel_fdi_link_freq(to_i915(dev), pipe_config);

	fdi_dotclock = adjusted_mode->crtc_clock;

	lane = ironlake_get_lanes_required(fdi_dotclock, link_bw,
					   pipe_config->pipe_bpp);

	pipe_config->fdi_lanes = lane;

	intel_link_compute_m_n(pipe_config->pipe_bpp, lane, fdi_dotclock,
			       link_bw, &pipe_config->fdi_m_n, false, false);

	ret = ironlake_check_fdi_lanes(dev, intel_crtc->pipe, pipe_config);
	if (ret == -EDEADLK)
		return ret;

	/* Too many lanes needed: shrink bpp (6bpc minimum) and retry. */
	if (ret == -EINVAL && pipe_config->pipe_bpp > 6*3) {
		pipe_config->pipe_bpp -= 2*3;
		DRM_DEBUG_KMS("fdi link bw constraint, reducing pipe bpp to %i\n",
			      pipe_config->pipe_bpp);
		needs_recompute = true;
		pipe_config->bw_constrained = true;

		goto retry;
	}

	if (needs_recompute)
		return RETRY;

	return ret;
}
7301
7302 bool hsw_crtc_state_ips_capable(const struct intel_crtc_state *crtc_state)
7303 {
7304 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
7305 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7306
7307
7308 if (!hsw_crtc_supports_ips(crtc))
7309 return false;
7310
7311 if (!i915_modparams.enable_ips)
7312 return false;
7313
7314 if (crtc_state->pipe_bpp > 24)
7315 return false;
7316
7317
7318
7319
7320
7321
7322
7323
7324 if (IS_BROADWELL(dev_priv) &&
7325 crtc_state->pixel_rate > dev_priv->max_cdclk_freq * 95 / 100)
7326 return false;
7327
7328 return true;
7329 }
7330
7331 static bool hsw_compute_ips_config(struct intel_crtc_state *crtc_state)
7332 {
7333 struct drm_i915_private *dev_priv =
7334 to_i915(crtc_state->base.crtc->dev);
7335 struct intel_atomic_state *intel_state =
7336 to_intel_atomic_state(crtc_state->base.state);
7337
7338 if (!hsw_crtc_state_ips_capable(crtc_state))
7339 return false;
7340
7341
7342
7343
7344
7345
7346
7347 if (crtc_state->crc_enabled)
7348 return false;
7349
7350
7351 if (!(crtc_state->active_planes & ~BIT(PLANE_CURSOR)))
7352 return false;
7353
7354
7355 if (IS_BROADWELL(dev_priv) &&
7356 crtc_state->pixel_rate > intel_state->cdclk.logical.cdclk * 95 / 100)
7357 return false;
7358
7359 return true;
7360 }
7361
7362 static bool intel_crtc_supports_double_wide(const struct intel_crtc *crtc)
7363 {
7364 const struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7365
7366
7367 return INTEL_GEN(dev_priv) < 4 &&
7368 (crtc->pipe == PIPE_A || IS_I915G(dev_priv));
7369 }
7370
7371 static u32 ilk_pipe_pixel_rate(const struct intel_crtc_state *pipe_config)
7372 {
7373 u32 pixel_rate;
7374
7375 pixel_rate = pipe_config->base.adjusted_mode.crtc_clock;
7376
7377
7378
7379
7380
7381
7382 if (pipe_config->pch_pfit.enabled) {
7383 u64 pipe_w, pipe_h, pfit_w, pfit_h;
7384 u32 pfit_size = pipe_config->pch_pfit.size;
7385
7386 pipe_w = pipe_config->pipe_src_w;
7387 pipe_h = pipe_config->pipe_src_h;
7388
7389 pfit_w = (pfit_size >> 16) & 0xFFFF;
7390 pfit_h = pfit_size & 0xFFFF;
7391 if (pipe_w < pfit_w)
7392 pipe_w = pfit_w;
7393 if (pipe_h < pfit_h)
7394 pipe_h = pfit_h;
7395
7396 if (WARN_ON(!pfit_w || !pfit_h))
7397 return pixel_rate;
7398
7399 pixel_rate = div_u64(mul_u32_u32(pixel_rate, pipe_w * pipe_h),
7400 pfit_w * pfit_h);
7401 }
7402
7403 return pixel_rate;
7404 }
7405
7406 static void intel_crtc_compute_pixel_rate(struct intel_crtc_state *crtc_state)
7407 {
7408 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
7409
7410 if (HAS_GMCH(dev_priv))
7411
7412 crtc_state->pixel_rate =
7413 crtc_state->base.adjusted_mode.crtc_clock;
7414 else
7415 crtc_state->pixel_rate =
7416 ilk_pipe_pixel_rate(crtc_state);
7417 }
7418
/*
 * Validate and finalize platform-independent parts of the CRTC config:
 * dotclock limits (with double-wide handling on pre-gen4), YCbCr/CTM
 * exclusivity, source-width restrictions, sync-offset quirks, pixel
 * rate and, for PCH pipes, the FDI configuration.
 * Returns 0, RETRY, or a negative errno.
 */
static int intel_crtc_compute_config(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct drm_display_mode *adjusted_mode = &pipe_config->base.adjusted_mode;
	int clock_limit = dev_priv->max_dotclk_freq;

	if (INTEL_GEN(dev_priv) < 4) {
		clock_limit = dev_priv->max_cdclk_freq * 9 / 10;

		/*
		 * Enable double wide mode when the dot clock
		 * is > 90% of the (display) core speed.
		 */
		if (intel_crtc_supports_double_wide(crtc) &&
		    adjusted_mode->crtc_clock > clock_limit) {
			clock_limit = dev_priv->max_dotclk_freq;
			pipe_config->double_wide = true;
		}
	}

	if (adjusted_mode->crtc_clock > clock_limit) {
		DRM_DEBUG_KMS("requested pixel clock (%d kHz) too high (max: %d kHz, double wide: %s)\n",
			      adjusted_mode->crtc_clock, clock_limit,
			      yesno(pipe_config->double_wide));
		return -EINVAL;
	}

	if ((pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
	     pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) &&
	     pipe_config->base.ctm) {
		/*
		 * YCbCr output and a CTM cannot be combined; the pipe
		 * CSC is a single resource needed for the RGB->YCbCr
		 * conversion.
		 */
		DRM_DEBUG_KMS("YCBCR420 and CTM together are not possible\n");
		return -EINVAL;
	}

	/*
	 * Odd pipe source widths are not supported in double-wide mode
	 * or with dual-link LVDS.
	 */
	if (pipe_config->pipe_src_w & 1) {
		if (pipe_config->double_wide) {
			DRM_DEBUG_KMS("Odd pipe source width not supported with double wide pipe\n");
			return -EINVAL;
		}

		if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_LVDS) &&
		    intel_is_dual_link_lvds(dev_priv)) {
			DRM_DEBUG_KMS("Odd pipe source width not supported with dual link LVDS\n");
			return -EINVAL;
		}
	}

	/*
	 * Reject modes with a zero hsync front porch on gen4+/g4x
	 * (hsync_start coinciding with hdisplay).
	 */
	if ((INTEL_GEN(dev_priv) > 4 || IS_G4X(dev_priv)) &&
	    adjusted_mode->crtc_hsync_start == adjusted_mode->crtc_hdisplay)
		return -EINVAL;

	intel_crtc_compute_pixel_rate(pipe_config);

	if (pipe_config->has_pch_encoder)
		return ironlake_fdi_compute_config(crtc, pipe_config);

	return 0;
}
7492
7493 static void
7494 intel_reduce_m_n_ratio(u32 *num, u32 *den)
7495 {
7496 while (*num > DATA_LINK_M_N_MASK ||
7497 *den > DATA_LINK_M_N_MASK) {
7498 *num >>= 1;
7499 *den >>= 1;
7500 }
7501 }
7502
7503 static void compute_m_n(unsigned int m, unsigned int n,
7504 u32 *ret_m, u32 *ret_n,
7505 bool constant_n)
7506 {
7507
7508
7509
7510
7511
7512
7513
7514 if (constant_n)
7515 *ret_n = 0x8000;
7516 else
7517 *ret_n = min_t(unsigned int, roundup_pow_of_two(n), DATA_LINK_N_MAX);
7518
7519 *ret_m = div_u64(mul_u32_u32(m, *ret_n), n);
7520 intel_reduce_m_n_ratio(ret_m, ret_n);
7521 }
7522
7523 void
7524 intel_link_compute_m_n(u16 bits_per_pixel, int nlanes,
7525 int pixel_clock, int link_clock,
7526 struct intel_link_m_n *m_n,
7527 bool constant_n, bool fec_enable)
7528 {
7529 u32 data_clock = bits_per_pixel * pixel_clock;
7530
7531 if (fec_enable)
7532 data_clock = intel_dp_mode_to_fec_clock(data_clock);
7533
7534 m_n->tu = 64;
7535 compute_m_n(data_clock,
7536 link_clock * nlanes * 8,
7537 &m_n->gmch_m, &m_n->gmch_n,
7538 constant_n);
7539
7540 compute_m_n(pixel_clock, link_clock,
7541 &m_n->link_m, &m_n->link_n,
7542 constant_n);
7543 }
7544
7545 static inline bool intel_panel_use_ssc(struct drm_i915_private *dev_priv)
7546 {
7547 if (i915_modparams.panel_use_ssc >= 0)
7548 return i915_modparams.panel_use_ssc != 0;
7549 return dev_priv->vbt.lvds_use_ssc
7550 && !(dev_priv->quirks & QUIRK_LVDS_SSC_DISABLE);
7551 }
7552
7553 static u32 pnv_dpll_compute_fp(struct dpll *dpll)
7554 {
7555 return (1 << dpll->n) << 16 | dpll->m2;
7556 }
7557
7558 static u32 i9xx_dpll_compute_fp(struct dpll *dpll)
7559 {
7560 return dpll->n << 16 | dpll->m1 << 8 | dpll->m2;
7561 }
7562
7563 static void i9xx_update_pll_dividers(struct intel_crtc *crtc,
7564 struct intel_crtc_state *crtc_state,
7565 struct dpll *reduced_clock)
7566 {
7567 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
7568 u32 fp, fp2 = 0;
7569
7570 if (IS_PINEVIEW(dev_priv)) {
7571 fp = pnv_dpll_compute_fp(&crtc_state->dpll);
7572 if (reduced_clock)
7573 fp2 = pnv_dpll_compute_fp(reduced_clock);
7574 } else {
7575 fp = i9xx_dpll_compute_fp(&crtc_state->dpll);
7576 if (reduced_clock)
7577 fp2 = i9xx_dpll_compute_fp(reduced_clock);
7578 }
7579
7580 crtc_state->dpll_hw_state.fp0 = fp;
7581
7582 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
7583 reduced_clock) {
7584 crtc_state->dpll_hw_state.fp1 = fp2;
7585 } else {
7586 crtc_state->dpll_hw_state.fp1 = fp;
7587 }
7588 }
7589
/*
 * Recalibrate the PLL B opamp via a fixed DPIO register sequence.
 * The exact register semantics come from VLV DPIO documentation; the
 * magic values below are hardware-specified and must not be changed.
 */
static void vlv_pllb_recal_opamp(struct drm_i915_private *dev_priv, enum pipe
		pipe)
{
	u32 reg_val;

	/* Write the calibration trigger into PLL DW9 (low byte only). */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW9(1));
	reg_val &= 0xffffff00;
	reg_val |= 0x00000030;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9(1), reg_val);

	/* Latch the value through REF_DW13 (top byte = 0x8c). */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_REF_DW13);
	reg_val &= 0x00ffffff;
	reg_val |= 0x8c000000;
	vlv_dpio_write(dev_priv, pipe, VLV_REF_DW13, reg_val);

	/* Clear the trigger byte again. */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW9(1));
	reg_val &= 0xffffff00;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9(1), reg_val);

	/* Final latch with top byte 0xb0 completes the recalibration. */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_REF_DW13);
	reg_val &= 0x00ffffff;
	reg_val |= 0xb0000000;
	vlv_dpio_write(dev_priv, pipe, VLV_REF_DW13, reg_val);
}
7618
/* Program the PCH transcoder's data/link M1/N1 registers for @m_n. */
static void intel_pch_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
					 const struct intel_link_m_n *m_n)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	I915_WRITE(PCH_TRANS_DATA_M1(pipe), TU_SIZE(m_n->tu) | m_n->gmch_m);
	I915_WRITE(PCH_TRANS_DATA_N1(pipe), m_n->gmch_n);
	I915_WRITE(PCH_TRANS_LINK_M1(pipe), m_n->link_m);
	I915_WRITE(PCH_TRANS_LINK_N1(pipe), m_n->link_n);
}
7631
7632 static bool transcoder_has_m2_n2(struct drm_i915_private *dev_priv,
7633 enum transcoder transcoder)
7634 {
7635 if (IS_HASWELL(dev_priv))
7636 return transcoder == TRANSCODER_EDP;
7637
7638
7639
7640
7641
7642 return IS_GEN(dev_priv, 7) || IS_CHERRYVIEW(dev_priv);
7643 }
7644
/*
 * Program the CPU transcoder's data/link M/N registers. On gen5+ the
 * per-transcoder registers are used (plus the M2/N2 set when DRRS is
 * configured and the transcoder has one); pre-gen5 uses the per-pipe
 * G4X register layout.
 */
static void intel_cpu_transcoder_set_m_n(const struct intel_crtc_state *crtc_state,
					 const struct intel_link_m_n *m_n,
					 const struct intel_link_m_n *m2_n2)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder transcoder = crtc_state->cpu_transcoder;

	if (INTEL_GEN(dev_priv) >= 5) {
		I915_WRITE(PIPE_DATA_M1(transcoder), TU_SIZE(m_n->tu) | m_n->gmch_m);
		I915_WRITE(PIPE_DATA_N1(transcoder), m_n->gmch_n);
		I915_WRITE(PIPE_LINK_M1(transcoder), m_n->link_m);
		I915_WRITE(PIPE_LINK_N1(transcoder), m_n->link_n);
		/*
		 * M2_N2 registers are set only if DRRS is supported
		 * (to make sure the registers are not unnecessarily accessed).
		 */
		if (m2_n2 && crtc_state->has_drrs &&
		    transcoder_has_m2_n2(dev_priv, transcoder)) {
			I915_WRITE(PIPE_DATA_M2(transcoder),
				   TU_SIZE(m2_n2->tu) | m2_n2->gmch_m);
			I915_WRITE(PIPE_DATA_N2(transcoder), m2_n2->gmch_n);
			I915_WRITE(PIPE_LINK_M2(transcoder), m2_n2->link_m);
			I915_WRITE(PIPE_LINK_N2(transcoder), m2_n2->link_n);
		}
	} else {
		I915_WRITE(PIPE_DATA_M_G4X(pipe), TU_SIZE(m_n->tu) | m_n->gmch_m);
		I915_WRITE(PIPE_DATA_N_G4X(pipe), m_n->gmch_n);
		I915_WRITE(PIPE_LINK_M_G4X(pipe), m_n->link_m);
		I915_WRITE(PIPE_LINK_N_G4X(pipe), m_n->link_n);
	}
}
7678
7679 void intel_dp_set_m_n(const struct intel_crtc_state *crtc_state, enum link_m_n_set m_n)
7680 {
7681 const struct intel_link_m_n *dp_m_n, *dp_m2_n2 = NULL;
7682
7683 if (m_n == M1_N1) {
7684 dp_m_n = &crtc_state->dp_m_n;
7685 dp_m2_n2 = &crtc_state->dp_m2_n2;
7686 } else if (m_n == M2_N2) {
7687
7688
7689
7690
7691
7692 dp_m_n = &crtc_state->dp_m2_n2;
7693 } else {
7694 DRM_ERROR("Unsupported divider value\n");
7695 return;
7696 }
7697
7698 if (crtc_state->has_pch_encoder)
7699 intel_pch_transcoder_set_m_n(crtc_state, &crtc_state->dp_m_n);
7700 else
7701 intel_cpu_transcoder_set_m_n(crtc_state, dp_m_n, dp_m2_n2);
7702 }
7703
7704 static void vlv_compute_dpll(struct intel_crtc *crtc,
7705 struct intel_crtc_state *pipe_config)
7706 {
7707 pipe_config->dpll_hw_state.dpll = DPLL_INTEGRATED_REF_CLK_VLV |
7708 DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
7709 if (crtc->pipe != PIPE_A)
7710 pipe_config->dpll_hw_state.dpll |= DPLL_INTEGRATED_CRI_CLK_VLV;
7711
7712
7713 if (!intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DSI))
7714 pipe_config->dpll_hw_state.dpll |= DPLL_VCO_ENABLE |
7715 DPLL_EXT_BUFFER_ENABLE_VLV;
7716
7717 pipe_config->dpll_hw_state.dpll_md =
7718 (pipe_config->pixel_multiplier - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT;
7719 }
7720
7721 static void chv_compute_dpll(struct intel_crtc *crtc,
7722 struct intel_crtc_state *pipe_config)
7723 {
7724 pipe_config->dpll_hw_state.dpll = DPLL_SSC_REF_CLK_CHV |
7725 DPLL_REF_CLK_ENABLE_VLV | DPLL_VGA_MODE_DIS;
7726 if (crtc->pipe != PIPE_A)
7727 pipe_config->dpll_hw_state.dpll |= DPLL_INTEGRATED_CRI_CLK_VLV;
7728
7729
7730 if (!intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DSI))
7731 pipe_config->dpll_hw_state.dpll |= DPLL_VCO_ENABLE;
7732
7733 pipe_config->dpll_hw_state.dpll_md =
7734 (pipe_config->pixel_multiplier - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT;
7735 }
7736
/*
 * Program the VLV DPLL dividers and analog tuning values over DPIO
 * before the PLL is enabled. The magic constants come from the VLV
 * DPIO programming notes and must be written in this order.
 */
static void vlv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	u32 mdiv;
	u32 bestn, bestm1, bestm2, bestp1, bestp2;
	u32 coreclk, reg_val;

	/* Enable the refclk, with VCO and ext buffer still off. */
	I915_WRITE(DPLL(pipe),
		   pipe_config->dpll_hw_state.dpll &
		   ~(DPLL_VCO_ENABLE | DPLL_EXT_BUFFER_ENABLE_VLV));

	/* No need to actually set up the DPLL with DSI */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);

	bestn = pipe_config->dpll.n;
	bestm1 = pipe_config->dpll.m1;
	bestm2 = pipe_config->dpll.m2;
	bestp1 = pipe_config->dpll.p1;
	bestp2 = pipe_config->dpll.p2;

	/* PLL B needs special handling */
	if (pipe == PIPE_B)
		vlv_pllb_recal_opamp(dev_priv, pipe);

	/* Set up Tx target for periodic Rcomp update */
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW9_BCAST, 0x0100000f);

	/* Disable target IRef on PLL */
	reg_val = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW8(pipe));
	reg_val &= 0x00ffffff;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW8(pipe), reg_val);

	/* Disable fast lock */
	vlv_dpio_write(dev_priv, pipe, VLV_CMN_DW0, 0x610);

	/* Set idtafcrecal before PLL is enabled */
	mdiv = ((bestm1 << DPIO_M1DIV_SHIFT) | (bestm2 & DPIO_M2DIV_MASK));
	mdiv |= ((bestp1 << DPIO_P1_SHIFT) | (bestp2 << DPIO_P2_SHIFT));
	mdiv |= ((bestn << DPIO_N_SHIFT));
	mdiv |= (1 << DPIO_K_SHIFT);

	/*
	 * Post divider depends on pixel clock rate, DAC vs digital (and LVDS,
	 * but we don't support that). Note: don't use the DAC post divider
	 * as it seems unstable.
	 */
	mdiv |= (DPIO_POST_DIV_HDMIDP << DPIO_POST_DIV_SHIFT);
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW3(pipe), mdiv);

	mdiv |= DPIO_ENABLE_CALIBRATION;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW3(pipe), mdiv);

	/* Set HBR and RBR LPF coefficients */
	if (pipe_config->port_clock == 162000 ||
	    intel_crtc_has_type(pipe_config, INTEL_OUTPUT_ANALOG) ||
	    intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI))
		vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW10(pipe),
				 0x009f0003);
	else
		vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW10(pipe),
				 0x00d0000f);

	if (intel_crtc_has_dp_encoder(pipe_config)) {
		/* Use SSC source */
		if (pipe == PIPE_A)
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df40000);
		else
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df70000);
	} else { /* HDMI or VGA */
		/* Use bend source */
		if (pipe == PIPE_A)
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df70000);
		else
			vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW5(pipe),
					 0x0df40000);
	}

	/* Core clock configuration; extra bit set for DP outputs. */
	coreclk = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW7(pipe));
	coreclk = (coreclk & 0x0000ff00) | 0x01c00000;
	if (intel_crtc_has_dp_encoder(pipe_config))
		coreclk |= 0x01000000;
	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW7(pipe), coreclk);

	vlv_dpio_write(dev_priv, pipe, VLV_PLL_DW11(pipe), 0x87871000);

	vlv_dpio_put(dev_priv);
}
7836
/*
 * Program the CHV DPLL dividers, loop filter and lock thresholds through
 * the sideband (DPIO) interface before the PLL gets enabled.
 */
static void chv_prepare_pll(struct intel_crtc *crtc,
			    const struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	u32 loopfilter, tribuf_calcntr;
	u32 bestn, bestm1, bestm2, bestp1, bestp2, bestm2_frac;
	u32 dpio_val;
	int vco;

	/* Write the control register with the VCO still disabled; enabling
	 * happens later in chv_enable_pll(). */
	I915_WRITE(DPLL(pipe),
		   pipe_config->dpll_hw_state.dpll & ~DPLL_VCO_ENABLE);

	/* Nothing to program if the VCO is not going to be enabled. */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	/* On CHV, M2 carries a 22-bit fractional part in its low bits and
	 * the integer part above it. */
	bestn = pipe_config->dpll.n;
	bestm2_frac = pipe_config->dpll.m2 & 0x3fffff;
	bestm1 = pipe_config->dpll.m1;
	bestm2 = pipe_config->dpll.m2 >> 22;
	bestp1 = pipe_config->dpll.p1;
	bestp2 = pipe_config->dpll.p2;
	vco = pipe_config->dpll.vco;
	dpio_val = 0;
	loopfilter = 0;

	vlv_dpio_get(dev_priv);

	/* P1/P2 dividers, plus fixed S1=5 and K=1. */
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW13(port),
			5 << DPIO_CHV_S1_DIV_SHIFT |
			bestp1 << DPIO_CHV_P1_DIV_SHIFT |
			bestp2 << DPIO_CHV_P2_DIV_SHIFT |
			1 << DPIO_CHV_K_DIV_SHIFT);

	/* Integer part of M2. */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW0(port), bestm2);

	/* M1 fixed to divide-by-2, N divider. */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW1(port),
			DPIO_CHV_M1_DIV_BY_2 |
			1 << DPIO_CHV_N_DIV_SHIFT);

	/* Fractional part of M2. */
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW2(port), bestm2_frac);

	/* Feed-forward gain; fractional divide is only enabled when M2
	 * actually has a fractional part. */
	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW3(port));
	dpio_val &= ~(DPIO_CHV_FEEDFWD_GAIN_MASK | DPIO_CHV_FRAC_DIV_EN);
	dpio_val |= (2 << DPIO_CHV_FEEDFWD_GAIN_SHIFT);
	if (bestm2_frac)
		dpio_val |= DPIO_CHV_FRAC_DIV_EN;
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW3(port), dpio_val);

	/* Integer lock threshold; coarse selection only when there is no
	 * fractional part. */
	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW9(port));
	dpio_val &= ~(DPIO_CHV_INT_LOCK_THRESHOLD_MASK |
		      DPIO_CHV_INT_LOCK_THRESHOLD_SEL_COARSE);
	dpio_val |= (0x5 << DPIO_CHV_INT_LOCK_THRESHOLD_SHIFT);
	if (!bestm2_frac)
		dpio_val |= DPIO_CHV_INT_LOCK_THRESHOLD_SEL_COARSE;
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW9(port), dpio_val);

	/* Loop filter coefficients and tri-buffer calibration count are
	 * selected by VCO frequency band. */
	if (vco == 5400000) {
		loopfilter |= (0x3 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x8 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x1 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x9;
	} else if (vco <= 6200000) {
		loopfilter |= (0x5 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0xB << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x9;
	} else if (vco <= 6480000) {
		loopfilter |= (0x4 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x9 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0x8;
	} else {
		/* VCO above 6.48 GHz: same coefficients, no calibration count. */
		loopfilter |= (0x4 << DPIO_CHV_PROP_COEFF_SHIFT);
		loopfilter |= (0x9 << DPIO_CHV_INT_COEFF_SHIFT);
		loopfilter |= (0x3 << DPIO_CHV_GAIN_CTRL_SHIFT);
		tribuf_calcntr = 0;
	}
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW6(port), loopfilter);

	dpio_val = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW8(port));
	dpio_val &= ~DPIO_CHV_TDC_TARGET_CNT_MASK;
	dpio_val |= (tribuf_calcntr << DPIO_CHV_TDC_TARGET_CNT_SHIFT);
	vlv_dpio_write(dev_priv, pipe, CHV_PLL_DW8(port), dpio_val);

	/* Request AFC recalibration. */
	vlv_dpio_write(dev_priv, pipe, CHV_CMN_DW14(port),
			vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW14(port)) |
			DPIO_AFC_RECAL);

	vlv_dpio_put(dev_priv);
}
7941
7942
7943
7944
7945
7946
7947
7948
7949
7950
7951
7952 int vlv_force_pll_on(struct drm_i915_private *dev_priv, enum pipe pipe,
7953 const struct dpll *dpll)
7954 {
7955 struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
7956 struct intel_crtc_state *pipe_config;
7957
7958 pipe_config = kzalloc(sizeof(*pipe_config), GFP_KERNEL);
7959 if (!pipe_config)
7960 return -ENOMEM;
7961
7962 pipe_config->base.crtc = &crtc->base;
7963 pipe_config->pixel_multiplier = 1;
7964 pipe_config->dpll = *dpll;
7965
7966 if (IS_CHERRYVIEW(dev_priv)) {
7967 chv_compute_dpll(crtc, pipe_config);
7968 chv_prepare_pll(crtc, pipe_config);
7969 chv_enable_pll(crtc, pipe_config);
7970 } else {
7971 vlv_compute_dpll(crtc, pipe_config);
7972 vlv_prepare_pll(crtc, pipe_config);
7973 vlv_enable_pll(crtc, pipe_config);
7974 }
7975
7976 kfree(pipe_config);
7977
7978 return 0;
7979 }
7980
7981
7982
7983
7984
7985
7986
7987
7988
7989 void vlv_force_pll_off(struct drm_i915_private *dev_priv, enum pipe pipe)
7990 {
7991 if (IS_CHERRYVIEW(dev_priv))
7992 chv_disable_pll(dev_priv, pipe);
7993 else
7994 vlv_disable_pll(dev_priv, pipe);
7995 }
7996
/*
 * Compute the DPLL control register value for gen3+ GMCH platforms and
 * store it (plus DPLL_MD on gen4+) in crtc_state->dpll_hw_state.
 */
static void i9xx_compute_dpll(struct intel_crtc *crtc,
			      struct intel_crtc_state *crtc_state,
			      struct dpll *reduced_clock)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dpll;
	struct dpll *clock = &crtc_state->dpll;

	i9xx_update_pll_dividers(crtc, crtc_state, reduced_clock);

	dpll = DPLL_VGA_MODE_DIS;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS))
		dpll |= DPLLB_MODE_LVDS;
	else
		dpll |= DPLLB_MODE_DAC_SERIAL;

	/* These platforms encode the pixel multiplier directly in the DPLL
	 * control register (gen4+ uses DPLL_MD instead, see below). */
	if (IS_I945G(dev_priv) || IS_I945GM(dev_priv) ||
	    IS_G33(dev_priv) || IS_PINEVIEW(dev_priv)) {
		dpll |= (crtc_state->pixel_multiplier - 1)
			<< SDVO_MULTIPLIER_SHIFT_HIRES;
	}

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	if (intel_crtc_has_dp_encoder(crtc_state))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/* P1 is encoded as a one-hot bitfield; Pineview has its own shift,
	 * and G4X additionally programs the reduced clock's P1. */
	if (IS_PINEVIEW(dev_priv))
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW;
	else {
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
		if (IS_G4X(dev_priv) && reduced_clock)
			dpll |= (1 << (reduced_clock->p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;
	}
	switch (clock->p2) {
	case 5:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
		break;
	case 7:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
		break;
	case 10:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
		break;
	case 14:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
		break;
	}
	if (INTEL_GEN(dev_priv) >= 4)
		dpll |= (6 << PLL_LOAD_PULSE_PHASE_SHIFT);

	/* Reference clock selection: TV clock for SDVO TV, spread-spectrum
	 * for SSC-capable LVDS panels, plain DREF otherwise. */
	if (crtc_state->sdvo_tv_clock)
		dpll |= PLL_REF_INPUT_TVCLKINBC;
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
		 intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;
	crtc_state->dpll_hw_state.dpll = dpll;

	/* gen4+ carries the pixel multiplier in the separate DPLL_MD
	 * register rather than in the DPLL control word. */
	if (INTEL_GEN(dev_priv) >= 4) {
		u32 dpll_md = (crtc_state->pixel_multiplier - 1)
			<< DPLL_MD_UDI_MULTIPLIER_SHIFT;
		crtc_state->dpll_hw_state.dpll_md = dpll_md;
	}
}
8069
/*
 * Compute the DPLL control register value for gen2 platforms and store
 * it in crtc_state->dpll_hw_state.dpll.
 */
static void i8xx_compute_dpll(struct intel_crtc *crtc,
			      struct intel_crtc_state *crtc_state,
			      struct dpll *reduced_clock)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 dpll;
	struct dpll *clock = &crtc_state->dpll;

	i9xx_update_pll_dividers(crtc, crtc_state, reduced_clock);

	dpll = DPLL_VGA_MODE_DIS;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		/* LVDS: P1 as a one-hot bitfield. */
		dpll |= (1 << (clock->p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;
	} else {
		/* Non-LVDS: P1 == 2 has a dedicated bit, otherwise the raw
		 * (p1 - 2) value is programmed; P2 == 4 likewise has its
		 * own divide-by-4 bit. */
		if (clock->p1 == 2)
			dpll |= PLL_P1_DIVIDE_BY_TWO;
		else
			dpll |= (clock->p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT;
		if (clock->p2 == 4)
			dpll |= PLL_P2_DIVIDE_BY_4;
	}

	/*
	 * DVO 2x mode is enabled for all I830 and for any pipe driving a
	 * DVO output. NOTE(review): the original rationale comment was
	 * lost here; presumably the PLL must run at twice the dot clock
	 * in these configurations — confirm against bspec.
	 */
	if (IS_I830(dev_priv) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DVO))
		dpll |= DPLL_DVO_2X_MODE;

	/* Spread-spectrum reference for SSC-capable LVDS, DREF otherwise. */
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
	    intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;
	crtc_state->dpll_hw_state.dpll = dpll;
}
8119
/*
 * Program the transcoder timing registers (H/V total, blank, sync) from
 * the adjusted mode in the crtc state.
 */
static void intel_set_pipe_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	const struct drm_display_mode *adjusted_mode = &crtc_state->base.adjusted_mode;
	u32 crtc_vtotal, crtc_vblank_end;
	int vsyncshift = 0;

	/* Vertical total / vblank end may need adjustment for interlace,
	 * so work on local copies. */
	crtc_vtotal = adjusted_mode->crtc_vtotal;
	crtc_vblank_end = adjusted_mode->crtc_vblank_end;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* Interlaced modes are programmed one line short.
		 * NOTE(review): presumably the hardware adds the extra
		 * half-lines itself — confirm against bspec. */
		crtc_vtotal -= 1;
		crtc_vblank_end -= 1;

		/* Vsync shift: half a line for SDVO, otherwise relative to
		 * hsync start; wrap negative values back into the line. */
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			vsyncshift = (adjusted_mode->crtc_htotal - 1) / 2;
		else
			vsyncshift = adjusted_mode->crtc_hsync_start -
				adjusted_mode->crtc_htotal / 2;
		if (vsyncshift < 0)
			vsyncshift += adjusted_mode->crtc_htotal;
	}

	if (INTEL_GEN(dev_priv) > 3)
		I915_WRITE(VSYNCSHIFT(cpu_transcoder), vsyncshift);

	/* All timing registers take (value - 1), with the "end"/"total"
	 * component in the upper 16 bits. */
	I915_WRITE(HTOTAL(cpu_transcoder),
		   (adjusted_mode->crtc_hdisplay - 1) |
		   ((adjusted_mode->crtc_htotal - 1) << 16));
	I915_WRITE(HBLANK(cpu_transcoder),
		   (adjusted_mode->crtc_hblank_start - 1) |
		   ((adjusted_mode->crtc_hblank_end - 1) << 16));
	I915_WRITE(HSYNC(cpu_transcoder),
		   (adjusted_mode->crtc_hsync_start - 1) |
		   ((adjusted_mode->crtc_hsync_end - 1) << 16));

	I915_WRITE(VTOTAL(cpu_transcoder),
		   (adjusted_mode->crtc_vdisplay - 1) |
		   ((crtc_vtotal - 1) << 16));
	I915_WRITE(VBLANK(cpu_transcoder),
		   (adjusted_mode->crtc_vblank_start - 1) |
		   ((crtc_vblank_end - 1) << 16));
	I915_WRITE(VSYNC(cpu_transcoder),
		   (adjusted_mode->crtc_vsync_start - 1) |
		   ((adjusted_mode->crtc_vsync_end - 1) << 16));

	/* HSW pipes B/C driven via the eDP transcoder also mirror the
	 * transcoder's VTOTAL into the pipe-local register.
	 * NOTE(review): looks like a hardware workaround — confirm. */
	if (IS_HASWELL(dev_priv) && cpu_transcoder == TRANSCODER_EDP &&
	    (pipe == PIPE_B || pipe == PIPE_C))
		I915_WRITE(VTOTAL(pipe), I915_READ(VTOTAL(cpu_transcoder)));

}
8181
8182 static void intel_set_pipe_src_size(const struct intel_crtc_state *crtc_state)
8183 {
8184 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
8185 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8186 enum pipe pipe = crtc->pipe;
8187
8188
8189
8190
8191 I915_WRITE(PIPESRC(pipe),
8192 ((crtc_state->pipe_src_w - 1) << 16) |
8193 (crtc_state->pipe_src_h - 1));
8194 }
8195
/*
 * Read the transcoder timing registers back into the adjusted mode of
 * the given crtc state (hardware readout path).
 */
static void intel_get_pipe_timings(struct intel_crtc *crtc,
				   struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	u32 tmp;

	/* All registers hold (value - 1); low word is the display/start
	 * component, high word the total/end component. */
	tmp = I915_READ(HTOTAL(cpu_transcoder));
	pipe_config->base.adjusted_mode.crtc_hdisplay = (tmp & 0xffff) + 1;
	pipe_config->base.adjusted_mode.crtc_htotal = ((tmp >> 16) & 0xffff) + 1;

	/* DSI transcoders have no blanking registers to read. */
	if (!transcoder_is_dsi(cpu_transcoder)) {
		tmp = I915_READ(HBLANK(cpu_transcoder));
		pipe_config->base.adjusted_mode.crtc_hblank_start =
			(tmp & 0xffff) + 1;
		pipe_config->base.adjusted_mode.crtc_hblank_end =
			((tmp >> 16) & 0xffff) + 1;
	}
	tmp = I915_READ(HSYNC(cpu_transcoder));
	pipe_config->base.adjusted_mode.crtc_hsync_start = (tmp & 0xffff) + 1;
	pipe_config->base.adjusted_mode.crtc_hsync_end = ((tmp >> 16) & 0xffff) + 1;

	tmp = I915_READ(VTOTAL(cpu_transcoder));
	pipe_config->base.adjusted_mode.crtc_vdisplay = (tmp & 0xffff) + 1;
	pipe_config->base.adjusted_mode.crtc_vtotal = ((tmp >> 16) & 0xffff) + 1;

	if (!transcoder_is_dsi(cpu_transcoder)) {
		tmp = I915_READ(VBLANK(cpu_transcoder));
		pipe_config->base.adjusted_mode.crtc_vblank_start =
			(tmp & 0xffff) + 1;
		pipe_config->base.adjusted_mode.crtc_vblank_end =
			((tmp >> 16) & 0xffff) + 1;
	}
	tmp = I915_READ(VSYNC(cpu_transcoder));
	pipe_config->base.adjusted_mode.crtc_vsync_start = (tmp & 0xffff) + 1;
	pipe_config->base.adjusted_mode.crtc_vsync_end = ((tmp >> 16) & 0xffff) + 1;

	/* Interlaced modes were programmed one line short (see
	 * intel_set_pipe_timings()), so add the line back on readout. */
	if (I915_READ(PIPECONF(cpu_transcoder)) & PIPECONF_INTERLACE_MASK) {
		pipe_config->base.adjusted_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		pipe_config->base.adjusted_mode.crtc_vtotal += 1;
		pipe_config->base.adjusted_mode.crtc_vblank_end += 1;
	}
}
8240
8241 static void intel_get_pipe_src_size(struct intel_crtc *crtc,
8242 struct intel_crtc_state *pipe_config)
8243 {
8244 struct drm_device *dev = crtc->base.dev;
8245 struct drm_i915_private *dev_priv = to_i915(dev);
8246 u32 tmp;
8247
8248 tmp = I915_READ(PIPESRC(crtc->pipe));
8249 pipe_config->pipe_src_h = (tmp & 0xffff) + 1;
8250 pipe_config->pipe_src_w = ((tmp >> 16) & 0xffff) + 1;
8251
8252 pipe_config->base.mode.vdisplay = pipe_config->pipe_src_h;
8253 pipe_config->base.mode.hdisplay = pipe_config->pipe_src_w;
8254 }
8255
8256 void intel_mode_from_pipe_config(struct drm_display_mode *mode,
8257 struct intel_crtc_state *pipe_config)
8258 {
8259 mode->hdisplay = pipe_config->base.adjusted_mode.crtc_hdisplay;
8260 mode->htotal = pipe_config->base.adjusted_mode.crtc_htotal;
8261 mode->hsync_start = pipe_config->base.adjusted_mode.crtc_hsync_start;
8262 mode->hsync_end = pipe_config->base.adjusted_mode.crtc_hsync_end;
8263
8264 mode->vdisplay = pipe_config->base.adjusted_mode.crtc_vdisplay;
8265 mode->vtotal = pipe_config->base.adjusted_mode.crtc_vtotal;
8266 mode->vsync_start = pipe_config->base.adjusted_mode.crtc_vsync_start;
8267 mode->vsync_end = pipe_config->base.adjusted_mode.crtc_vsync_end;
8268
8269 mode->flags = pipe_config->base.adjusted_mode.flags;
8270 mode->type = DRM_MODE_TYPE_DRIVER;
8271
8272 mode->clock = pipe_config->base.adjusted_mode.crtc_clock;
8273
8274 mode->hsync = drm_mode_hsync(mode);
8275 mode->vrefresh = drm_mode_vrefresh(mode);
8276 drm_mode_set_name(mode);
8277 }
8278
/*
 * Build and write the PIPECONF register for GMCH-era pipes: enable bit
 * preservation (I830), double wide, dithering/bpc, interlace mode,
 * color range and gamma mode.
 */
static void i9xx_set_pipeconf(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 pipeconf;

	pipeconf = 0;

	/* I830: preserve the current enable bit rather than clearing it.
	 * NOTE(review): presumably because both pipes must stay enabled on
	 * I830 — confirm. */
	if (IS_I830(dev_priv))
		pipeconf |= I915_READ(PIPECONF(crtc->pipe)) & PIPECONF_ENABLE;

	if (crtc_state->double_wide)
		pipeconf |= PIPECONF_DOUBLE_WIDE;

	/* Only G4X/VLV/CHV support pipe-level bpc/dithering config. */
	if (IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
	    IS_CHERRYVIEW(dev_priv)) {
		/* No dithering at 30 bpp — nothing deeper to dither to. */
		if (crtc_state->dither && crtc_state->pipe_bpp != 30)
			pipeconf |= PIPECONF_DITHER_EN |
				    PIPECONF_DITHER_TYPE_SP;

		switch (crtc_state->pipe_bpp) {
		case 18:
			pipeconf |= PIPECONF_6BPC;
			break;
		case 24:
			pipeconf |= PIPECONF_8BPC;
			break;
		case 30:
			pipeconf |= PIPECONF_10BPC;
			break;
		default:
			/* Compute-config should never produce other depths. */
			BUG();
		}
	}

	if (crtc_state->base.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE) {
		/* gen3 and SDVO need the field-indication interlace mode. */
		if (INTEL_GEN(dev_priv) < 4 ||
		    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO))
			pipeconf |= PIPECONF_INTERLACE_W_FIELD_INDICATION;
		else
			pipeconf |= PIPECONF_INTERLACE_W_SYNC_SHIFT;
	} else {
		pipeconf |= PIPECONF_PROGRESSIVE;
	}

	/* Limited (16-235) color range selection exists only on VLV/CHV. */
	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	     crtc_state->limited_color_range)
		pipeconf |= PIPECONF_COLOR_RANGE_SELECT;

	pipeconf |= PIPECONF_GAMMA_MODE(crtc_state->gamma_mode);

	I915_WRITE(PIPECONF(crtc->pipe), pipeconf);
	POSTING_READ(PIPECONF(crtc->pipe));
}
8337
8338 static int i8xx_crtc_compute_clock(struct intel_crtc *crtc,
8339 struct intel_crtc_state *crtc_state)
8340 {
8341 struct drm_device *dev = crtc->base.dev;
8342 struct drm_i915_private *dev_priv = to_i915(dev);
8343 const struct intel_limit *limit;
8344 int refclk = 48000;
8345
8346 memset(&crtc_state->dpll_hw_state, 0,
8347 sizeof(crtc_state->dpll_hw_state));
8348
8349 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
8350 if (intel_panel_use_ssc(dev_priv)) {
8351 refclk = dev_priv->vbt.lvds_ssc_freq;
8352 DRM_DEBUG_KMS("using SSC reference clock of %d kHz\n", refclk);
8353 }
8354
8355 limit = &intel_limits_i8xx_lvds;
8356 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DVO)) {
8357 limit = &intel_limits_i8xx_dvo;
8358 } else {
8359 limit = &intel_limits_i8xx_dac;
8360 }
8361
8362 if (!crtc_state->clock_set &&
8363 !i9xx_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8364 refclk, NULL, &crtc_state->dpll)) {
8365 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8366 return -EINVAL;
8367 }
8368
8369 i8xx_compute_dpll(crtc, crtc_state, NULL);
8370
8371 return 0;
8372 }
8373
8374 static int g4x_crtc_compute_clock(struct intel_crtc *crtc,
8375 struct intel_crtc_state *crtc_state)
8376 {
8377 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8378 const struct intel_limit *limit;
8379 int refclk = 96000;
8380
8381 memset(&crtc_state->dpll_hw_state, 0,
8382 sizeof(crtc_state->dpll_hw_state));
8383
8384 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
8385 if (intel_panel_use_ssc(dev_priv)) {
8386 refclk = dev_priv->vbt.lvds_ssc_freq;
8387 DRM_DEBUG_KMS("using SSC reference clock of %d kHz\n", refclk);
8388 }
8389
8390 if (intel_is_dual_link_lvds(dev_priv))
8391 limit = &intel_limits_g4x_dual_channel_lvds;
8392 else
8393 limit = &intel_limits_g4x_single_channel_lvds;
8394 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) ||
8395 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
8396 limit = &intel_limits_g4x_hdmi;
8397 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO)) {
8398 limit = &intel_limits_g4x_sdvo;
8399 } else {
8400
8401 limit = &intel_limits_i9xx_sdvo;
8402 }
8403
8404 if (!crtc_state->clock_set &&
8405 !g4x_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8406 refclk, NULL, &crtc_state->dpll)) {
8407 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8408 return -EINVAL;
8409 }
8410
8411 i9xx_compute_dpll(crtc, crtc_state, NULL);
8412
8413 return 0;
8414 }
8415
8416 static int pnv_crtc_compute_clock(struct intel_crtc *crtc,
8417 struct intel_crtc_state *crtc_state)
8418 {
8419 struct drm_device *dev = crtc->base.dev;
8420 struct drm_i915_private *dev_priv = to_i915(dev);
8421 const struct intel_limit *limit;
8422 int refclk = 96000;
8423
8424 memset(&crtc_state->dpll_hw_state, 0,
8425 sizeof(crtc_state->dpll_hw_state));
8426
8427 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
8428 if (intel_panel_use_ssc(dev_priv)) {
8429 refclk = dev_priv->vbt.lvds_ssc_freq;
8430 DRM_DEBUG_KMS("using SSC reference clock of %d kHz\n", refclk);
8431 }
8432
8433 limit = &intel_limits_pineview_lvds;
8434 } else {
8435 limit = &intel_limits_pineview_sdvo;
8436 }
8437
8438 if (!crtc_state->clock_set &&
8439 !pnv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8440 refclk, NULL, &crtc_state->dpll)) {
8441 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8442 return -EINVAL;
8443 }
8444
8445 i9xx_compute_dpll(crtc, crtc_state, NULL);
8446
8447 return 0;
8448 }
8449
8450 static int i9xx_crtc_compute_clock(struct intel_crtc *crtc,
8451 struct intel_crtc_state *crtc_state)
8452 {
8453 struct drm_device *dev = crtc->base.dev;
8454 struct drm_i915_private *dev_priv = to_i915(dev);
8455 const struct intel_limit *limit;
8456 int refclk = 96000;
8457
8458 memset(&crtc_state->dpll_hw_state, 0,
8459 sizeof(crtc_state->dpll_hw_state));
8460
8461 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
8462 if (intel_panel_use_ssc(dev_priv)) {
8463 refclk = dev_priv->vbt.lvds_ssc_freq;
8464 DRM_DEBUG_KMS("using SSC reference clock of %d kHz\n", refclk);
8465 }
8466
8467 limit = &intel_limits_i9xx_lvds;
8468 } else {
8469 limit = &intel_limits_i9xx_sdvo;
8470 }
8471
8472 if (!crtc_state->clock_set &&
8473 !i9xx_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8474 refclk, NULL, &crtc_state->dpll)) {
8475 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8476 return -EINVAL;
8477 }
8478
8479 i9xx_compute_dpll(crtc, crtc_state, NULL);
8480
8481 return 0;
8482 }
8483
8484 static int chv_crtc_compute_clock(struct intel_crtc *crtc,
8485 struct intel_crtc_state *crtc_state)
8486 {
8487 int refclk = 100000;
8488 const struct intel_limit *limit = &intel_limits_chv;
8489
8490 memset(&crtc_state->dpll_hw_state, 0,
8491 sizeof(crtc_state->dpll_hw_state));
8492
8493 if (!crtc_state->clock_set &&
8494 !chv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8495 refclk, NULL, &crtc_state->dpll)) {
8496 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8497 return -EINVAL;
8498 }
8499
8500 chv_compute_dpll(crtc, crtc_state);
8501
8502 return 0;
8503 }
8504
8505 static int vlv_crtc_compute_clock(struct intel_crtc *crtc,
8506 struct intel_crtc_state *crtc_state)
8507 {
8508 int refclk = 100000;
8509 const struct intel_limit *limit = &intel_limits_vlv;
8510
8511 memset(&crtc_state->dpll_hw_state, 0,
8512 sizeof(crtc_state->dpll_hw_state));
8513
8514 if (!crtc_state->clock_set &&
8515 !vlv_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
8516 refclk, NULL, &crtc_state->dpll)) {
8517 DRM_ERROR("Couldn't find PLL settings for mode!\n");
8518 return -EINVAL;
8519 }
8520
8521 vlv_compute_dpll(crtc, crtc_state);
8522
8523 return 0;
8524 }
8525
8526 static bool i9xx_has_pfit(struct drm_i915_private *dev_priv)
8527 {
8528 if (IS_I830(dev_priv))
8529 return false;
8530
8531 return INTEL_GEN(dev_priv) >= 4 ||
8532 IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv);
8533 }
8534
/*
 * Read the GMCH panel fitter state for this crtc, if the fitter exists,
 * is enabled, and is actually attached to this pipe.
 */
static void i9xx_get_pfit_config(struct intel_crtc *crtc,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 tmp;

	if (!i9xx_has_pfit(dev_priv))
		return;

	tmp = I915_READ(PFIT_CONTROL);
	if (!(tmp & PFIT_ENABLE))
		return;

	/* Claim the pfit only for the pipe it serves: pre-gen4 only pipe B
	 * is considered; gen4+ reads the pipe selection from the register. */
	if (INTEL_GEN(dev_priv) < 4) {
		if (crtc->pipe != PIPE_B)
			return;
	} else {
		if ((tmp & PFIT_PIPE_MASK) != (crtc->pipe << PFIT_PIPE_SHIFT))
			return;
	}

	pipe_config->gmch_pfit.control = tmp;
	pipe_config->gmch_pfit.pgm_ratios = I915_READ(PFIT_PGM_RATIOS);
}
8560
/*
 * Read the VLV DPLL dividers back via DPIO and compute port_clock from
 * them. No-op if the PLL is not enabled.
 */
static void vlv_crtc_clock_get(struct intel_crtc *crtc,
			       struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = pipe_config->cpu_transcoder;
	struct dpll clock;
	u32 mdiv;
	int refclk = 100000;	/* VLV uses a fixed 100 MHz reference */

	/* PLL disabled: the divider registers are not meaningful. */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);
	mdiv = vlv_dpio_read(dev_priv, pipe, VLV_PLL_DW3(pipe));
	vlv_dpio_put(dev_priv);

	/* Unpack the divider fields written by vlv_prepare_pll(). */
	clock.m1 = (mdiv >> DPIO_M1DIV_SHIFT) & 7;
	clock.m2 = mdiv & DPIO_M2DIV_MASK;
	clock.n = (mdiv >> DPIO_N_SHIFT) & 0xf;
	clock.p1 = (mdiv >> DPIO_P1_SHIFT) & 7;
	clock.p2 = (mdiv >> DPIO_P2_SHIFT) & 0x1f;

	pipe_config->port_clock = vlv_calc_dpll_params(refclk, &clock);
}
8587
/*
 * Read the primary plane's current (BIOS-programmed) configuration into
 * plane_config so the framebuffer set up by the firmware can be
 * inherited. Allocates an intel_framebuffer that the caller owns via
 * plane_config->fb; on failure nothing is set.
 */
static void
i9xx_get_initial_plane_config(struct intel_crtc *crtc,
			      struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_plane *plane = to_intel_plane(crtc->base.primary);
	enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
	enum pipe pipe;
	u32 val, base, offset;
	int fourcc, pixel_format;
	unsigned int aligned_height;
	struct drm_framebuffer *fb;
	struct intel_framebuffer *intel_fb;

	/* Nothing to inherit if the plane is off. */
	if (!plane->get_hw_state(plane, &pipe))
		return;

	WARN_ON(pipe != crtc->pipe);

	intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
	if (!intel_fb) {
		DRM_DEBUG_KMS("failed to alloc fb\n");
		return;
	}

	fb = &intel_fb->base;

	fb->dev = dev;

	val = I915_READ(DSPCNTR(i9xx_plane));

	/* Tiling and rotation bits only exist on gen4+. */
	if (INTEL_GEN(dev_priv) >= 4) {
		if (val & DISPPLANE_TILED) {
			plane_config->tiling = I915_TILING_X;
			fb->modifier = I915_FORMAT_MOD_X_TILED;
		}

		if (val & DISPPLANE_ROTATE_180)
			plane_config->rotation = DRM_MODE_ROTATE_180;
	}

	/* Horizontal mirroring is a CHV pipe B feature. */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B &&
	    val & DISPPLANE_MIRROR)
		plane_config->rotation |= DRM_MODE_REFLECT_X;

	pixel_format = val & DISPPLANE_PIXFORMAT_MASK;
	fourcc = i9xx_format_to_fourcc(pixel_format);
	fb->format = drm_format_info(fourcc);

	/* Surface base/offset register layout differs by platform:
	 * HSW/BDW use DSPOFFSET, gen4+ use DSPTILEOFF or DSPLINOFF
	 * depending on tiling, older parts have only DSPADDR. */
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		offset = I915_READ(DSPOFFSET(i9xx_plane));
		base = I915_READ(DSPSURF(i9xx_plane)) & 0xfffff000;
	} else if (INTEL_GEN(dev_priv) >= 4) {
		if (plane_config->tiling)
			offset = I915_READ(DSPTILEOFF(i9xx_plane));
		else
			offset = I915_READ(DSPLINOFF(i9xx_plane));
		base = I915_READ(DSPSURF(i9xx_plane)) & 0xfffff000;
	} else {
		base = I915_READ(DSPADDR(i9xx_plane));
	}
	plane_config->base = base;

	/* fb size comes from the pipe source size ((value - 1) encoded,
	 * width in the high word). */
	val = I915_READ(PIPESRC(pipe));
	fb->width = ((val >> 16) & 0xfff) + 1;
	fb->height = ((val >> 0) & 0xfff) + 1;

	val = I915_READ(DSPSTRIDE(i9xx_plane));
	fb->pitches[0] = val & 0xffffffc0;

	aligned_height = intel_fb_align_height(fb, 0, fb->height);

	plane_config->size = fb->pitches[0] * aligned_height;

	DRM_DEBUG_KMS("%s/%s with fb: size=%dx%d@%d, offset=%x, pitch %d, size 0x%x\n",
		      crtc->base.name, plane->base.name, fb->width, fb->height,
		      fb->format->cpp[0] * 8, base, fb->pitches[0],
		      plane_config->size);

	plane_config->fb = intel_fb;
}
8670
/*
 * Read the CHV DPLL dividers back via DPIO and compute port_clock from
 * them. No-op if the PLL is not enabled.
 */
static void chv_crtc_clock_get(struct intel_crtc *crtc,
			       struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = pipe_config->cpu_transcoder;
	enum dpio_channel port = vlv_pipe_to_channel(pipe);
	struct dpll clock;
	u32 cmn_dw13, pll_dw0, pll_dw1, pll_dw2, pll_dw3;
	int refclk = 100000;	/* CHV uses a fixed 100 MHz reference */

	/* PLL disabled: the divider registers are not meaningful. */
	if ((pipe_config->dpll_hw_state.dpll & DPLL_VCO_ENABLE) == 0)
		return;

	vlv_dpio_get(dev_priv);
	cmn_dw13 = vlv_dpio_read(dev_priv, pipe, CHV_CMN_DW13(port));
	pll_dw0 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW0(port));
	pll_dw1 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW1(port));
	pll_dw2 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW2(port));
	pll_dw3 = vlv_dpio_read(dev_priv, pipe, CHV_PLL_DW3(port));
	vlv_dpio_put(dev_priv);

	/* Unpack the fields written by chv_prepare_pll(): M2 is integer
	 * part << 22 plus the 22-bit fraction when fractional divide is
	 * enabled. */
	clock.m1 = (pll_dw1 & 0x7) == DPIO_CHV_M1_DIV_BY_2 ? 2 : 0;
	clock.m2 = (pll_dw0 & 0xff) << 22;
	if (pll_dw3 & DPIO_CHV_FRAC_DIV_EN)
		clock.m2 |= pll_dw2 & 0x3fffff;
	clock.n = (pll_dw1 >> DPIO_CHV_N_DIV_SHIFT) & 0xf;
	clock.p1 = (cmn_dw13 >> DPIO_CHV_P1_DIV_SHIFT) & 0x7;
	clock.p2 = (cmn_dw13 >> DPIO_CHV_P2_DIV_SHIFT) & 0x1f;

	pipe_config->port_clock = chv_calc_dpll_params(refclk, &clock);
}
8704
/*
 * Determine the pipe's current output color format (RGB / YCbCr 4:2:0 /
 * YCbCr 4:4:4) from PIPEMISC and record it, along with whether LSPCON
 * downsampling is in use.
 */
static void intel_get_crtc_ycbcr_config(struct intel_crtc *crtc,
					struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum intel_output_format output = INTEL_OUTPUT_FORMAT_RGB;

	pipe_config->lspcon_downsampling = false;

	/* PIPEMISC colorspace bits exist on BDW and gen9+. */
	if (IS_BROADWELL(dev_priv) || INTEL_GEN(dev_priv) >= 9) {
		u32 tmp = I915_READ(PIPEMISC(crtc->pipe));

		if (tmp & PIPEMISC_OUTPUT_COLORSPACE_YUV) {
			bool ycbcr420_enabled = tmp & PIPEMISC_YUV420_ENABLE;
			bool blend = tmp & PIPEMISC_YUV420_MODE_FULL_BLEND;

			if (ycbcr420_enabled) {
				/* 4:2:0 is only valid with full blend mode,
				 * and only on GLK / gen10+; anything else is
				 * an invalid hardware state. */
				if (!blend)
					output = INTEL_OUTPUT_FORMAT_INVALID;
				else if (!(IS_GEMINILAKE(dev_priv) ||
					   INTEL_GEN(dev_priv) >= 10))
					output = INTEL_OUTPUT_FORMAT_INVALID;
				else
					output = INTEL_OUTPUT_FORMAT_YCBCR420;
			} else {
				/*
				 * YUV colorspace without the 4:2:0 enable bit
				 * is treated as 4:4:4 with downsampling done
				 * externally. NOTE(review): presumably the
				 * LSPCON performs the 4:4:4 -> 4:2:0
				 * conversion in this configuration — the
				 * original explanatory comment was lost here;
				 * confirm against the LSPCON code.
				 */
				pipe_config->lspcon_downsampling = true;
				output = INTEL_OUTPUT_FORMAT_YCBCR444;
			}
		}
	}

	pipe_config->output_format = output;
}
8747
8748 static void i9xx_get_pipe_color_config(struct intel_crtc_state *crtc_state)
8749 {
8750 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
8751 struct intel_plane *plane = to_intel_plane(crtc->base.primary);
8752 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
8753 enum i9xx_plane_id i9xx_plane = plane->i9xx_plane;
8754 u32 tmp;
8755
8756 tmp = I915_READ(DSPCNTR(i9xx_plane));
8757
8758 if (tmp & DISPPLANE_GAMMA_ENABLE)
8759 crtc_state->gamma_enable = true;
8760
8761 if (!HAS_GMCH(dev_priv) &&
8762 tmp & DISPPLANE_PIPE_CSC_ENABLE)
8763 crtc_state->csc_enable = true;
8764 }
8765
/*
 * Full hardware state readout for a GMCH-era pipe: PIPECONF, color
 * config, timings, source size, panel fitter, pixel multiplier and DPLL
 * state, plus the resulting pixel clock.
 *
 * Returns true if the pipe is enabled and the state was read, false if
 * the pipe is off or its power domain could not be acquired.
 */
static bool i9xx_get_pipe_config(struct intel_crtc *crtc,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	u32 tmp;
	bool ret;

	/* Registers are only readable while the pipe's power well is up. */
	power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;
	pipe_config->shared_dpll = NULL;

	ret = false;

	tmp = I915_READ(PIPECONF(crtc->pipe));
	if (!(tmp & PIPECONF_ENABLE))
		goto out;

	/* Pipe-level bpc readout exists only on G4X/VLV/CHV. */
	if (IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
	    IS_CHERRYVIEW(dev_priv)) {
		switch (tmp & PIPECONF_BPC_MASK) {
		case PIPECONF_6BPC:
			pipe_config->pipe_bpp = 18;
			break;
		case PIPECONF_8BPC:
			pipe_config->pipe_bpp = 24;
			break;
		case PIPECONF_10BPC:
			pipe_config->pipe_bpp = 30;
			break;
		default:
			break;
		}
	}

	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
	    (tmp & PIPECONF_COLOR_RANGE_SELECT))
		pipe_config->limited_color_range = true;

	pipe_config->gamma_mode = (tmp & PIPECONF_GAMMA_MODE_MASK_I9XX) >>
		PIPECONF_GAMMA_MODE_SHIFT;

	if (IS_CHERRYVIEW(dev_priv))
		pipe_config->cgm_mode = I915_READ(CGM_PIPE_MODE(crtc->pipe));

	i9xx_get_pipe_color_config(pipe_config);
	intel_color_get_config(pipe_config);

	if (INTEL_GEN(dev_priv) < 4)
		pipe_config->double_wide = tmp & PIPECONF_DOUBLE_WIDE;

	intel_get_pipe_timings(crtc, pipe_config);
	intel_get_pipe_src_size(crtc, pipe_config);

	i9xx_get_pfit_config(crtc, pipe_config);

	/* Pixel multiplier: DPLL_MD on gen4+ (cached value on CHV pipes
	 * B/C), SDVO bits in DPLL on 945/G33/PNV, none on older parts. */
	if (INTEL_GEN(dev_priv) >= 4) {
		/* CHV pipes B/C: use the software-cached DPLL_MD.
		 * NOTE(review): presumably the register itself is not
		 * reliably readable there — confirm. */
		if (IS_CHERRYVIEW(dev_priv) && crtc->pipe != PIPE_A)
			tmp = dev_priv->chv_dpll_md[crtc->pipe];
		else
			tmp = I915_READ(DPLL_MD(crtc->pipe));
		pipe_config->pixel_multiplier =
			((tmp & DPLL_MD_UDI_MULTIPLIER_MASK)
			 >> DPLL_MD_UDI_MULTIPLIER_SHIFT) + 1;
		pipe_config->dpll_hw_state.dpll_md = tmp;
	} else if (IS_I945G(dev_priv) || IS_I945GM(dev_priv) ||
		   IS_G33(dev_priv) || IS_PINEVIEW(dev_priv)) {
		tmp = I915_READ(DPLL(crtc->pipe));
		pipe_config->pixel_multiplier =
			((tmp & SDVO_MULTIPLIER_MASK)
			 >> SDVO_MULTIPLIER_SHIFT_HIRES) + 1;
	} else {
		/* No pixel-multiplier support on the remaining platforms. */
		pipe_config->pixel_multiplier = 1;
	}
	pipe_config->dpll_hw_state.dpll = I915_READ(DPLL(crtc->pipe));
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv)) {
		pipe_config->dpll_hw_state.fp0 = I915_READ(FP0(crtc->pipe));
		pipe_config->dpll_hw_state.fp1 = I915_READ(FP1(crtc->pipe));
	} else {
		/* Mask out read-only status bits so the readout state can
		 * be compared against a computed state. */
		pipe_config->dpll_hw_state.dpll &= ~(DPLL_LOCK_VLV |
						     DPLL_PORTC_READY_MASK |
						     DPLL_PORTB_READY_MASK);
	}

	if (IS_CHERRYVIEW(dev_priv))
		chv_crtc_clock_get(crtc, pipe_config);
	else if (IS_VALLEYVIEW(dev_priv))
		vlv_crtc_clock_get(crtc, pipe_config);
	else
		i9xx_crtc_clock_get(crtc, pipe_config);

	/* The pipe runs at port_clock / pixel_multiplier. */
	pipe_config->base.adjusted_mode.crtc_clock =
		pipe_config->port_clock / pipe_config->pixel_multiplier;

	ret = true;

out:
	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
8883
/*
 * Initialize the PCH reference clock (PCH_DREF_CONTROL) for ILK-era PCHs.
 *
 * Scans the registered encoders to decide which reference sources are
 * needed (SSC for LVDS/eDP panels, CPU eDP source output), computes the
 * desired final register value, and then walks the hardware to that value
 * step by step with 200us settling delays between writes.
 */
static void ironlake_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	int i;
	u32 val, final;
	bool has_lvds = false;
	bool has_cpu_edp = false;
	bool has_panel = false;
	bool has_ck505 = false;
	bool can_ssc = false;
	bool using_ssc_source = false;

	/* We need to take the global config into account */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_LVDS:
			has_panel = true;
			has_lvds = true;
			break;
		case INTEL_OUTPUT_EDP:
			has_panel = true;
			if (encoder->port == PORT_A)
				has_cpu_edp = true;
			break;
		default:
			break;
		}
	}

	if (HAS_PCH_IBX(dev_priv)) {
		/* On IBX, SSC availability follows the external CK505 clock chip. */
		has_ck505 = dev_priv->vbt.display_clock_mode;
		can_ssc = has_ck505;
	} else {
		has_ck505 = false;
		can_ssc = true;
	}

	/* Check if any enabled DPLL is already consuming the SSC source. */
	for (i = 0; i < dev_priv->num_shared_dpll; i++) {
		u32 temp = I915_READ(PCH_DPLL(i));

		if (!(temp & DPLL_VCO_ENABLE))
			continue;

		if ((temp & PLL_REF_INPUT_MASK) ==
		    PLLB_REF_INPUT_SPREADSPECTRUMIN) {
			using_ssc_source = true;
			break;
		}
	}

	DRM_DEBUG_KMS("has_panel %d has_lvds %d has_ck505 %d using_ssc_source %d\n",
		      has_panel, has_lvds, has_ck505, using_ssc_source);

	/*
	 * Compute the desired final value first, so we can bail out early
	 * if the hardware is already in the wanted state.
	 */
	val = I915_READ(PCH_DREF_CONTROL);

	final = val;
	final &= ~DREF_NONSPREAD_SOURCE_MASK;
	if (has_ck505)
		final |= DREF_NONSPREAD_CK505_ENABLE;
	else
		final |= DREF_NONSPREAD_SOURCE_ENABLE;

	final &= ~DREF_SSC_SOURCE_MASK;
	final &= ~DREF_CPU_SOURCE_OUTPUT_MASK;
	final &= ~DREF_SSC1_ENABLE;

	if (has_panel) {
		final |= DREF_SSC_SOURCE_ENABLE;

		if (intel_panel_use_ssc(dev_priv) && can_ssc)
			final |= DREF_SSC1_ENABLE;

		if (has_cpu_edp) {
			if (intel_panel_use_ssc(dev_priv) && can_ssc)
				final |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			else
				final |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else
			final |= DREF_CPU_SOURCE_OUTPUT_DISABLE;
	} else if (using_ssc_source) {
		/* Keep SSC running for the DPLL that is already using it. */
		final |= DREF_SSC_SOURCE_ENABLE;
		final |= DREF_SSC1_ENABLE;
	}

	if (final == val)
		return;

	/* Now walk the hardware to the computed state, one field at a time. */
	val &= ~DREF_NONSPREAD_SOURCE_MASK;

	if (has_ck505)
		val |= DREF_NONSPREAD_CK505_ENABLE;
	else
		val |= DREF_NONSPREAD_SOURCE_ENABLE;

	if (has_panel) {
		val &= ~DREF_SSC_SOURCE_MASK;
		val |= DREF_SSC_SOURCE_ENABLE;

		/* SSC must be turned on before enabling the CPU output */
		if (intel_panel_use_ssc(dev_priv) && can_ssc) {
			DRM_DEBUG_KMS("Using SSC on panel\n");
			val |= DREF_SSC1_ENABLE;
		} else
			val &= ~DREF_SSC1_ENABLE;

		/* Write intermediate state and let it settle. */
		I915_WRITE(PCH_DREF_CONTROL, val);
		POSTING_READ(PCH_DREF_CONTROL);
		udelay(200);

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* Enable CPU source on CPU attached eDP */
		if (has_cpu_edp) {
			if (intel_panel_use_ssc(dev_priv) && can_ssc) {
				DRM_DEBUG_KMS("Using SSC on eDP\n");
				val |= DREF_CPU_SOURCE_OUTPUT_DOWNSPREAD;
			} else
				val |= DREF_CPU_SOURCE_OUTPUT_NONSPREAD;
		} else
			val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		I915_WRITE(PCH_DREF_CONTROL, val);
		POSTING_READ(PCH_DREF_CONTROL);
		udelay(200);
	} else {
		DRM_DEBUG_KMS("Disabling CPU source output\n");

		val &= ~DREF_CPU_SOURCE_OUTPUT_MASK;

		/* CPU output must go down before the SSC source. */
		val |= DREF_CPU_SOURCE_OUTPUT_DISABLE;

		I915_WRITE(PCH_DREF_CONTROL, val);
		POSTING_READ(PCH_DREF_CONTROL);
		udelay(200);

		if (!using_ssc_source) {
			DRM_DEBUG_KMS("Disabling SSC source\n");

			/* Turn off the SSC source */
			val &= ~DREF_SSC_SOURCE_MASK;
			val |= DREF_SSC_SOURCE_DISABLE;

			/* Turn off SSC1 */
			val &= ~DREF_SSC1_ENABLE;

			I915_WRITE(PCH_DREF_CONTROL, val);
			POSTING_READ(PCH_DREF_CONTROL);
			udelay(200);
		}
	}

	/* The stepwise writes must have converged on the precomputed value. */
	BUG_ON(val != final);
}
9050
/*
 * Pulse the FDI mPHY reset via SOUTH_CHICKEN2: assert the reset control
 * bit, wait (up to 100us) for the status bit to reflect it, then de-assert
 * and wait for the status to clear. Timeouts are logged but not fatal.
 */
static void lpt_reset_fdi_mphy(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = I915_READ(SOUTH_CHICKEN2);
	tmp |= FDI_MPHY_IOSFSB_RESET_CTL;
	I915_WRITE(SOUTH_CHICKEN2, tmp);

	if (wait_for_us(I915_READ(SOUTH_CHICKEN2) &
			FDI_MPHY_IOSFSB_RESET_STATUS, 100))
		DRM_ERROR("FDI mPHY reset assert timeout\n");

	tmp = I915_READ(SOUTH_CHICKEN2);
	tmp &= ~FDI_MPHY_IOSFSB_RESET_CTL;
	I915_WRITE(SOUTH_CHICKEN2, tmp);

	if (wait_for_us((I915_READ(SOUTH_CHICKEN2) &
			 FDI_MPHY_IOSFSB_RESET_STATUS) == 0, 100))
		DRM_ERROR("FDI mPHY reset de-assert timeout\n");
}
9071
9072
/*
 * Program the FDI mPHY registers over the sideband (SBI_MPHY) interface.
 *
 * The register offsets and values are an opaque, fixed tuning sequence;
 * most settings are applied twice, once per FDI port lane group (the
 * paired 0x20xx/0x21xx offsets). NOTE(review): values presumably come
 * from the platform's mPHY programming sequence — not derivable from
 * this file.
 */
static void lpt_program_fdi_mphy(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_sbi_read(dev_priv, 0x8008, SBI_MPHY);
	tmp &= ~(0xFF << 24);
	tmp |= (0x12 << 24);
	intel_sbi_write(dev_priv, 0x8008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2008, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2008, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2108, SBI_MPHY);
	tmp |= (1 << 11);
	intel_sbi_write(dev_priv, 0x2108, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x206C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x206C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x216C, SBI_MPHY);
	tmp |= (1 << 24) | (1 << 21) | (1 << 18);
	intel_sbi_write(dev_priv, 0x216C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2080, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2080, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2180, SBI_MPHY);
	tmp &= ~(7 << 13);
	tmp |= (5 << 13);
	intel_sbi_write(dev_priv, 0x2180, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x208C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x208C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x218C, SBI_MPHY);
	tmp &= ~0xFF;
	tmp |= 0x1C;
	intel_sbi_write(dev_priv, 0x218C, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2098, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2098, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x2198, SBI_MPHY);
	tmp &= ~(0xFF << 16);
	tmp |= (0x1C << 16);
	intel_sbi_write(dev_priv, 0x2198, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x20C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21C4, SBI_MPHY);
	tmp |= (1 << 27);
	intel_sbi_write(dev_priv, 0x21C4, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x20EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x20EC, tmp, SBI_MPHY);

	tmp = intel_sbi_read(dev_priv, 0x21EC, SBI_MPHY);
	tmp &= ~(0xF << 28);
	tmp |= (4 << 28);
	intel_sbi_write(dev_priv, 0x21EC, tmp, SBI_MPHY);
}
9146
9147
9148
9149
9150
9151
9152
/*
 * Enable CLKOUT_DP on the LPT PCH via the iCLK sideband.
 * @with_spread: also un-gate the spread-spectrum path
 * @with_fdi: additionally reset and program the FDI mPHY
 *
 * FDI requires downspread, and LPT-LP has no FDI at all — both invalid
 * combinations are WARNed on and corrected. Holds sb_lock across the
 * sideband sequence.
 */
static void lpt_enable_clkout_dp(struct drm_i915_private *dev_priv,
				 bool with_spread, bool with_fdi)
{
	u32 reg, tmp;

	if (WARN(with_fdi && !with_spread, "FDI requires downspread\n"))
		with_spread = true;
	if (WARN(HAS_PCH_LPT_LP(dev_priv) &&
	    with_fdi, "LP PCH doesn't have FDI\n"))
		with_fdi = false;

	mutex_lock(&dev_priv->sb_lock);

	/* Un-disable the SSC block but keep the path alternate-gated. */
	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	tmp &= ~SBI_SSCCTL_DISABLE;
	tmp |= SBI_SSCCTL_PATHALT;
	intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

	udelay(24);

	if (with_spread) {
		/* Release PATHALT to let the spread path run. */
		tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
		tmp &= ~SBI_SSCCTL_PATHALT;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);

		if (with_fdi) {
			lpt_reset_fdi_mphy(dev_priv);
			lpt_program_fdi_mphy(dev_priv);
		}
	}

	/* Buffer-enable lives in different SBI registers on LPT-LP vs LPT-H. */
	reg = HAS_PCH_LPT_LP(dev_priv) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp |= SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);
}
9191
9192
/*
 * Disable CLKOUT_DP on the LPT PCH: clear the buffer enable, then — if the
 * SSC block is still running — gate the path (PATHALT) with a settling
 * delay before setting the disable bit. Sequence order mirrors the enable
 * path in reverse.
 */
void lpt_disable_clkout_dp(struct drm_i915_private *dev_priv)
{
	u32 reg, tmp;

	mutex_lock(&dev_priv->sb_lock);

	reg = HAS_PCH_LPT_LP(dev_priv) ? SBI_GEN0 : SBI_DBUFF0;
	tmp = intel_sbi_read(dev_priv, reg, SBI_ICLK);
	tmp &= ~SBI_GEN0_CFG_BUFFENABLE_DISABLE;
	intel_sbi_write(dev_priv, reg, tmp, SBI_ICLK);

	tmp = intel_sbi_read(dev_priv, SBI_SSCCTL, SBI_ICLK);
	if (!(tmp & SBI_SSCCTL_DISABLE)) {
		if (!(tmp & SBI_SSCCTL_PATHALT)) {
			tmp |= SBI_SSCCTL_PATHALT;
			intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
			udelay(32);
		}
		tmp |= SBI_SSCCTL_DISABLE;
		intel_sbi_write(dev_priv, SBI_SSCCTL, tmp, SBI_ICLK);
	}

	mutex_unlock(&dev_priv->sb_lock);
}
9217
/*
 * Map a clock-bend step count (-50..+50, multiples of 5) to an array
 * index 0..20.
 */
#define BEND_IDX(steps) ((50 + (steps)) / 5)

/*
 * SBI_SSCDIVINTPHASE low-word values for each supported bend amount,
 * indexed by BEND_IDX(steps). Adjacent step pairs share a value; the
 * half-step difference is handled via SBI_SSCDITHPHASE dithering in
 * lpt_bend_clkout_dp().
 */
static const u16 sscdivintphase[] = {
	[BEND_IDX( 50)] = 0x3B23,
	[BEND_IDX( 45)] = 0x3B23,
	[BEND_IDX( 40)] = 0x3C23,
	[BEND_IDX( 35)] = 0x3C23,
	[BEND_IDX( 30)] = 0x3D23,
	[BEND_IDX( 25)] = 0x3D23,
	[BEND_IDX( 20)] = 0x3E23,
	[BEND_IDX( 15)] = 0x3E23,
	[BEND_IDX( 10)] = 0x3F23,
	[BEND_IDX(  5)] = 0x3F23,
	[BEND_IDX(  0)] = 0x0025,
	[BEND_IDX( -5)] = 0x0025,
	[BEND_IDX(-10)] = 0x0125,
	[BEND_IDX(-15)] = 0x0125,
	[BEND_IDX(-20)] = 0x0225,
	[BEND_IDX(-25)] = 0x0225,
	[BEND_IDX(-30)] = 0x0325,
	[BEND_IDX(-35)] = 0x0325,
	[BEND_IDX(-40)] = 0x0425,
	[BEND_IDX(-45)] = 0x0425,
	[BEND_IDX(-50)] = 0x0525,
};
9243
9244
9245
9246
9247
9248
9249
/*
 * Apply a clock bend of @steps (must be a multiple of 5, in the range
 * covered by sscdivintphase[]) to CLKOUT_DP via the iCLK sideband.
 *
 * Odd multiples of 5 (steps % 10 != 0) are realized by dithering between
 * the two neighbouring table entries (SBI_SSCDITHPHASE = 0xAAAAAAAB).
 * Note: the idx upper-bound WARN_ON also catches negative idx, because
 * ARRAY_SIZE() is size_t and the comparison converts idx to unsigned.
 */
static void lpt_bend_clkout_dp(struct drm_i915_private *dev_priv, int steps)
{
	u32 tmp;
	int idx = BEND_IDX(steps);

	if (WARN_ON(steps % 5 != 0))
		return;

	if (WARN_ON(idx >= ARRAY_SIZE(sscdivintphase)))
		return;

	mutex_lock(&dev_priv->sb_lock);

	if (steps % 10 != 0)
		tmp = 0xAAAAAAAB;
	else
		tmp = 0x00000000;
	intel_sbi_write(dev_priv, SBI_SSCDITHPHASE, tmp, SBI_ICLK);

	/* Only the low 16 bits carry the divider/phase value. */
	tmp = intel_sbi_read(dev_priv, SBI_SSCDIVINTPHASE, SBI_ICLK);
	tmp &= 0xffff0000;
	tmp |= sscdivintphase[idx];
	intel_sbi_write(dev_priv, SBI_SSCDIVINTPHASE, tmp, SBI_ICLK);

	mutex_unlock(&dev_priv->sb_lock);
}
9276
9277 #undef BEND_IDX
9278
9279 static bool spll_uses_pch_ssc(struct drm_i915_private *dev_priv)
9280 {
9281 u32 fuse_strap = I915_READ(FUSE_STRAP);
9282 u32 ctl = I915_READ(SPLL_CTL);
9283
9284 if ((ctl & SPLL_PLL_ENABLE) == 0)
9285 return false;
9286
9287 if ((ctl & SPLL_REF_MASK) == SPLL_REF_MUXED_SSC &&
9288 (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
9289 return true;
9290
9291 if (IS_BROADWELL(dev_priv) &&
9292 (ctl & SPLL_REF_MASK) == SPLL_REF_PCH_SSC_BDW)
9293 return true;
9294
9295 return false;
9296 }
9297
9298 static bool wrpll_uses_pch_ssc(struct drm_i915_private *dev_priv,
9299 enum intel_dpll_id id)
9300 {
9301 u32 fuse_strap = I915_READ(FUSE_STRAP);
9302 u32 ctl = I915_READ(WRPLL_CTL(id));
9303
9304 if ((ctl & WRPLL_PLL_ENABLE) == 0)
9305 return false;
9306
9307 if ((ctl & WRPLL_REF_MASK) == WRPLL_REF_PCH_SSC)
9308 return true;
9309
9310 if ((IS_BROADWELL(dev_priv) || IS_HSW_ULT(dev_priv)) &&
9311 (ctl & WRPLL_REF_MASK) == WRPLL_REF_MUXED_SSC_BDW &&
9312 (fuse_strap & HSW_CPU_SSC_ENABLE) == 0)
9313 return true;
9314
9315 return false;
9316 }
9317
/*
 * Initialize the LPT PCH reference clock (CLKOUT_DP).
 *
 * Records in pch_ssc_use which already-enabled PLLs (SPLL/WRPLL1/WRPLL2)
 * are consuming the PCH SSC reference — presumably set up by the BIOS —
 * and in that case leaves the clock tree untouched. Otherwise enables
 * CLKOUT_DP with spread + FDI mPHY setup when an FDI (analog) output is
 * present, or disables it entirely.
 */
static void lpt_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	bool has_fdi = false;

	/* Only analog (CRT) outputs use FDI on this platform. */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		switch (encoder->type) {
		case INTEL_OUTPUT_ANALOG:
			has_fdi = true;
			break;
		default:
			break;
		}
	}

	/*
	 * If any PLL is currently running off the PCH SSC we must not
	 * reconfigure CLKOUT_DP underneath it, so detect that first.
	 */
	dev_priv->pch_ssc_use = 0;

	if (spll_uses_pch_ssc(dev_priv)) {
		DRM_DEBUG_KMS("SPLL using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_SPLL);
	}

	if (wrpll_uses_pch_ssc(dev_priv, DPLL_ID_WRPLL1)) {
		DRM_DEBUG_KMS("WRPLL1 using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_WRPLL1);
	}

	if (wrpll_uses_pch_ssc(dev_priv, DPLL_ID_WRPLL2)) {
		DRM_DEBUG_KMS("WRPLL2 using PCH SSC\n");
		dev_priv->pch_ssc_use |= BIT(DPLL_ID_WRPLL2);
	}

	if (dev_priv->pch_ssc_use)
		return;

	if (has_fdi) {
		lpt_bend_clkout_dp(dev_priv, 0);
		lpt_enable_clkout_dp(dev_priv, true, true);
	} else {
		lpt_disable_clkout_dp(dev_priv);
	}
}
9375
9376
9377
9378
/*
 * Initialize the PCH reference clock, dispatching to the PCH-generation
 * specific implementation (IBX/CPT vs LPT). Other PCH types need no
 * refclk setup here.
 */
void intel_init_pch_refclk(struct drm_i915_private *dev_priv)
{
	if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv))
		ironlake_init_pch_refclk(dev_priv);
	else if (HAS_PCH_LPT(dev_priv))
		lpt_init_pch_refclk(dev_priv);
}
9386
9387 static void ironlake_set_pipeconf(const struct intel_crtc_state *crtc_state)
9388 {
9389 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
9390 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
9391 enum pipe pipe = crtc->pipe;
9392 u32 val;
9393
9394 val = 0;
9395
9396 switch (crtc_state->pipe_bpp) {
9397 case 18:
9398 val |= PIPECONF_6BPC;
9399 break;
9400 case 24:
9401 val |= PIPECONF_8BPC;
9402 break;
9403 case 30:
9404 val |= PIPECONF_10BPC;
9405 break;
9406 case 36:
9407 val |= PIPECONF_12BPC;
9408 break;
9409 default:
9410
9411 BUG();
9412 }
9413
9414 if (crtc_state->dither)
9415 val |= (PIPECONF_DITHER_EN | PIPECONF_DITHER_TYPE_SP);
9416
9417 if (crtc_state->base.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
9418 val |= PIPECONF_INTERLACED_ILK;
9419 else
9420 val |= PIPECONF_PROGRESSIVE;
9421
9422 if (crtc_state->limited_color_range)
9423 val |= PIPECONF_COLOR_RANGE_SELECT;
9424
9425 val |= PIPECONF_GAMMA_MODE(crtc_state->gamma_mode);
9426
9427 I915_WRITE(PIPECONF(pipe), val);
9428 POSTING_READ(PIPECONF(pipe));
9429 }
9430
9431 static void haswell_set_pipeconf(const struct intel_crtc_state *crtc_state)
9432 {
9433 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
9434 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
9435 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
9436 u32 val = 0;
9437
9438 if (IS_HASWELL(dev_priv) && crtc_state->dither)
9439 val |= (PIPECONF_DITHER_EN | PIPECONF_DITHER_TYPE_SP);
9440
9441 if (crtc_state->base.adjusted_mode.flags & DRM_MODE_FLAG_INTERLACE)
9442 val |= PIPECONF_INTERLACED_ILK;
9443 else
9444 val |= PIPECONF_PROGRESSIVE;
9445
9446 I915_WRITE(PIPECONF(cpu_transcoder), val);
9447 POSTING_READ(PIPECONF(cpu_transcoder));
9448 }
9449
/*
 * Program PIPEMISC for BDW+ pipes: dither bpc/type, YUV output colorspace,
 * YCbCr 4:2:0 blending and (gen11+) HDR precision mode.
 */
static void bdw_set_pipemisc(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 val = 0;

	switch (crtc_state->pipe_bpp) {
	case 18:
		val |= PIPEMISC_DITHER_6_BPC;
		break;
	case 24:
		val |= PIPEMISC_DITHER_8_BPC;
		break;
	case 30:
		val |= PIPEMISC_DITHER_10_BPC;
		break;
	case 36:
		val |= PIPEMISC_DITHER_12_BPC;
		break;
	default:
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
	}

	if (crtc_state->dither)
		val |= PIPEMISC_DITHER_ENABLE | PIPEMISC_DITHER_TYPE_SP;

	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
	    crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		val |= PIPEMISC_OUTPUT_COLORSPACE_YUV;

	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		val |= PIPEMISC_YUV420_ENABLE |
			PIPEMISC_YUV420_MODE_FULL_BLEND;

	/*
	 * Gen11+: enable HDR precision mode only when every active plane
	 * (besides the cursor) is an HDR-capable plane.
	 */
	if (INTEL_GEN(dev_priv) >= 11 &&
	    (crtc_state->active_planes & ~(icl_hdr_plane_mask() |
					   BIT(PLANE_CURSOR))) == 0)
		val |= PIPEMISC_HDR_MODE_PRECISION;

	I915_WRITE(PIPEMISC(crtc->pipe), val);
}
9492
9493 int bdw_get_pipemisc_bpp(struct intel_crtc *crtc)
9494 {
9495 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
9496 u32 tmp;
9497
9498 tmp = I915_READ(PIPEMISC(crtc->pipe));
9499
9500 switch (tmp & PIPEMISC_DITHER_BPC_MASK) {
9501 case PIPEMISC_DITHER_6_BPC:
9502 return 18;
9503 case PIPEMISC_DITHER_8_BPC:
9504 return 24;
9505 case PIPEMISC_DITHER_10_BPC:
9506 return 30;
9507 case PIPEMISC_DITHER_12_BPC:
9508 return 36;
9509 default:
9510 MISSING_CASE(tmp);
9511 return 0;
9512 }
9513 }
9514
9515 int ironlake_get_lanes_required(int target_clock, int link_bw, int bpp)
9516 {
9517
9518
9519
9520
9521
9522 u32 bps = target_clock * bpp * 21 / 20;
9523 return DIV_ROUND_UP(bps, link_bw * 8);
9524 }
9525
/*
 * Decide whether the PLL feedback coarse/fine tune bit (FP_CB_TUNE)
 * should be set: true when the effective feedback divider m is below
 * factor * n.
 */
static bool ironlake_needs_fb_cb_tune(struct dpll *dpll, int factor)
{
	return i9xx_dpll_compute_m(dpll) < factor * dpll->n;
}
9530
/*
 * Assemble the ILK DPLL, FP0 and FP1 register values for @crtc_state and
 * store them in crtc_state->dpll_hw_state. @reduced_clock, if non-NULL,
 * provides the divider settings used for FP1 (downclocked mode);
 * otherwise FP1 mirrors FP0.
 */
static void ironlake_compute_dpll(struct intel_crtc *crtc,
				  struct intel_crtc_state *crtc_state,
				  struct dpll *reduced_clock)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 dpll, fp, fp2;
	int factor;

	/* Pick the coarse/fine tune threshold factor for this output type. */
	factor = 21;
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if ((intel_panel_use_ssc(dev_priv) &&
		     dev_priv->vbt.lvds_ssc_freq == 100000) ||
		    (HAS_PCH_IBX(dev_priv) &&
		     intel_is_dual_link_lvds(dev_priv)))
			factor = 25;
	} else if (crtc_state->sdvo_tv_clock) {
		factor = 20;
	}

	fp = i9xx_dpll_compute_fp(&crtc_state->dpll);

	if (ironlake_needs_fb_cb_tune(&crtc_state->dpll, factor))
		fp |= FP_CB_TUNE;

	if (reduced_clock) {
		fp2 = i9xx_dpll_compute_fp(reduced_clock);

		if (reduced_clock->m < factor * reduced_clock->n)
			fp2 |= FP_CB_TUNE;
	} else {
		fp2 = fp;
	}

	dpll = 0;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS))
		dpll |= DPLLB_MODE_LVDS;
	else
		dpll |= DPLLB_MODE_DAC_SERIAL;

	dpll |= (crtc_state->pixel_multiplier - 1)
		<< PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_SDVO) ||
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	if (intel_crtc_has_dp_encoder(crtc_state))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/*
	 * Also set the high-speed IO clock bit for analog outputs on
	 * 3-pipe parts. NOTE(review): rationale isn't visible here —
	 * presumably a hardware requirement on IVB-class PCH setups;
	 * confirm against the original commit/BSpec before changing.
	 */
	if (INTEL_INFO(dev_priv)->num_pipes == 3 &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		dpll |= DPLL_SDVO_HIGH_SPEED;

	/* P1 is encoded as a one-hot bitmask in both FPA0 and FPA1 fields. */
	dpll |= (1 << (crtc_state->dpll.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT;

	dpll |= (1 << (crtc_state->dpll.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT;

	switch (crtc_state->dpll.p2) {
	case 5:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_5;
		break;
	case 7:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_7;
		break;
	case 10:
		dpll |= DPLL_DAC_SERIAL_P2_CLOCK_DIV_10;
		break;
	case 14:
		dpll |= DPLLB_LVDS_P2_CLOCK_DIV_14;
		break;
	}

	/* Reference: spread-spectrum input for SSC LVDS, DREFCLK otherwise. */
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS) &&
	    intel_panel_use_ssc(dev_priv))
		dpll |= PLLB_REF_INPUT_SPREADSPECTRUMIN;
	else
		dpll |= PLL_REF_INPUT_DREFCLK;

	dpll |= DPLL_VCO_ENABLE;

	crtc_state->dpll_hw_state.dpll = dpll;
	crtc_state->dpll_hw_state.fp0 = fp;
	crtc_state->dpll_hw_state.fp1 = fp2;
}
9632
/*
 * Compute the DPLL state for an ILK crtc and reserve a shared PCH DPLL.
 *
 * CPU eDP (no PCH encoder) needs no PCH PLL and returns early. The
 * reference clock defaults to 120 MHz, or the VBT SSC frequency for SSC
 * LVDS panels; the divider limits are chosen per output type and refclk.
 * Returns 0 on success, -EINVAL if no divider settings or no free shared
 * DPLL can be found.
 */
static int ironlake_crtc_compute_clock(struct intel_crtc *crtc,
				       struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(crtc_state->base.state);
	const struct intel_limit *limit;
	int refclk = 120000;

	memset(&crtc_state->dpll_hw_state, 0,
	       sizeof(crtc_state->dpll_hw_state));

	/* CPU eDP is the only output that doesn't need a PCH PLL here. */
	if (!crtc_state->has_pch_encoder)
		return 0;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_LVDS)) {
		if (intel_panel_use_ssc(dev_priv)) {
			DRM_DEBUG_KMS("using SSC reference clock of %d kHz\n",
				      dev_priv->vbt.lvds_ssc_freq);
			refclk = dev_priv->vbt.lvds_ssc_freq;
		}

		if (intel_is_dual_link_lvds(dev_priv)) {
			if (refclk == 100000)
				limit = &intel_limits_ironlake_dual_lvds_100m;
			else
				limit = &intel_limits_ironlake_dual_lvds;
		} else {
			if (refclk == 100000)
				limit = &intel_limits_ironlake_single_lvds_100m;
			else
				limit = &intel_limits_ironlake_single_lvds;
		}
	} else {
		limit = &intel_limits_ironlake_dac;
	}

	if (!crtc_state->clock_set &&
	    !g4x_find_best_dpll(limit, crtc_state, crtc_state->port_clock,
				refclk, NULL, &crtc_state->dpll)) {
		DRM_ERROR("Couldn't find PLL settings for mode!\n");
		return -EINVAL;
	}

	ironlake_compute_dpll(crtc, crtc_state, NULL);

	if (!intel_reserve_shared_dplls(state, crtc, NULL)) {
		DRM_DEBUG_KMS("failed to find PLL for pipe %c\n",
			      pipe_name(crtc->pipe));
		return -EINVAL;
	}

	return 0;
}
9688
/*
 * Read back the M1/N1 link and data values of the PCH transcoder for
 * @crtc's pipe into @m_n. The TU size shares the DATA_M register; it is
 * stored as (field value + 1).
 */
static void intel_pch_transcoder_get_m_n(struct intel_crtc *crtc,
					 struct intel_link_m_n *m_n)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe = crtc->pipe;

	m_n->link_m = I915_READ(PCH_TRANS_LINK_M1(pipe));
	m_n->link_n = I915_READ(PCH_TRANS_LINK_N1(pipe));
	m_n->gmch_m = I915_READ(PCH_TRANS_DATA_M1(pipe))
		& ~TU_SIZE_MASK;
	m_n->gmch_n = I915_READ(PCH_TRANS_DATA_N1(pipe));
	m_n->tu = ((I915_READ(PCH_TRANS_DATA_M1(pipe))
		    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
}
9704
/*
 * Read back the CPU transcoder link M/N values. On gen5+ the registers
 * are per-transcoder and a second M2/N2 set is read into @m2_n2 when the
 * caller provides it and the transcoder supports it; on older (G4X-style)
 * hardware the registers are per-pipe and only one set exists.
 */
static void intel_cpu_transcoder_get_m_n(struct intel_crtc *crtc,
					 enum transcoder transcoder,
					 struct intel_link_m_n *m_n,
					 struct intel_link_m_n *m2_n2)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	if (INTEL_GEN(dev_priv) >= 5) {
		m_n->link_m = I915_READ(PIPE_LINK_M1(transcoder));
		m_n->link_n = I915_READ(PIPE_LINK_N1(transcoder));
		m_n->gmch_m = I915_READ(PIPE_DATA_M1(transcoder))
			& ~TU_SIZE_MASK;
		m_n->gmch_n = I915_READ(PIPE_DATA_N1(transcoder));
		m_n->tu = ((I915_READ(PIPE_DATA_M1(transcoder))
			    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;

		if (m2_n2 && transcoder_has_m2_n2(dev_priv, transcoder)) {
			m2_n2->link_m = I915_READ(PIPE_LINK_M2(transcoder));
			m2_n2->link_n = I915_READ(PIPE_LINK_N2(transcoder));
			m2_n2->gmch_m = I915_READ(PIPE_DATA_M2(transcoder))
				& ~TU_SIZE_MASK;
			m2_n2->gmch_n = I915_READ(PIPE_DATA_N2(transcoder));
			m2_n2->tu = ((I915_READ(PIPE_DATA_M2(transcoder))
				      & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
		}
	} else {
		m_n->link_m = I915_READ(PIPE_LINK_M_G4X(pipe));
		m_n->link_n = I915_READ(PIPE_LINK_N_G4X(pipe));
		m_n->gmch_m = I915_READ(PIPE_DATA_M_G4X(pipe))
			& ~TU_SIZE_MASK;
		m_n->gmch_n = I915_READ(PIPE_DATA_N_G4X(pipe));
		m_n->tu = ((I915_READ(PIPE_DATA_M_G4X(pipe))
			    & TU_SIZE_MASK) >> TU_SIZE_SHIFT) + 1;
	}
}
9741
9742 void intel_dp_get_m_n(struct intel_crtc *crtc,
9743 struct intel_crtc_state *pipe_config)
9744 {
9745 if (pipe_config->has_pch_encoder)
9746 intel_pch_transcoder_get_m_n(crtc, &pipe_config->dp_m_n);
9747 else
9748 intel_cpu_transcoder_get_m_n(crtc, pipe_config->cpu_transcoder,
9749 &pipe_config->dp_m_n,
9750 &pipe_config->dp_m2_n2);
9751 }
9752
/*
 * Read back the FDI link M/N values from the CPU transcoder; FDI has no
 * second M2/N2 set.
 */
static void ironlake_get_fdi_m_n_config(struct intel_crtc *crtc,
					struct intel_crtc_state *pipe_config)
{
	intel_cpu_transcoder_get_m_n(crtc, pipe_config->cpu_transcoder,
				     &pipe_config->fdi_m_n, NULL);
}
9759
/*
 * Read back the SKL+ panel fitter (pipe scaler) state. Scans the crtc's
 * scalers for one that is enabled and bound to the pipe (plane select
 * bits clear), records its position/size, and updates scaler_state
 * bookkeeping accordingly.
 */
static void skylake_get_pfit_config(struct intel_crtc *crtc,
				    struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_scaler_state *scaler_state = &pipe_config->scaler_state;
	u32 ps_ctrl = 0;
	int id = -1;
	int i;

	/* Find the scaler attached to this pipe, if any. */
	for (i = 0; i < crtc->num_scalers; i++) {
		ps_ctrl = I915_READ(SKL_PS_CTRL(crtc->pipe, i));
		if (ps_ctrl & PS_SCALER_EN && !(ps_ctrl & PS_PLANE_SEL_MASK)) {
			id = i;
			pipe_config->pch_pfit.enabled = true;
			pipe_config->pch_pfit.pos = I915_READ(SKL_PS_WIN_POS(crtc->pipe, i));
			pipe_config->pch_pfit.size = I915_READ(SKL_PS_WIN_SZ(crtc->pipe, i));
			scaler_state->scalers[i].in_use = true;
			break;
		}
	}

	scaler_state->scaler_id = id;
	if (id >= 0) {
		scaler_state->scaler_users |= (1 << SKL_CRTC_INDEX);
	} else {
		scaler_state->scaler_users &= ~(1 << SKL_CRTC_INDEX);
	}
}
9790
/*
 * Read back the BIOS-programmed primary plane state on SKL+ and build an
 * intel_framebuffer describing it (format, modifier, rotation, size,
 * stride, base address), so the boot framebuffer can be inherited.
 * On any unrecognized hw state the partially built fb is freed and
 * plane_config->fb is left NULL.
 */
static void
skylake_get_initial_plane_config(struct intel_crtc *crtc,
				 struct intel_initial_plane_config *plane_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_plane *plane = to_intel_plane(crtc->base.primary);
	enum plane_id plane_id = plane->id;
	enum pipe pipe;
	u32 val, base, offset, stride_mult, tiling, alpha;
	int fourcc, pixel_format;
	unsigned int aligned_height;
	struct drm_framebuffer *fb;
	struct intel_framebuffer *intel_fb;

	if (!plane->get_hw_state(plane, &pipe))
		return;

	WARN_ON(pipe != crtc->pipe);

	intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
	if (!intel_fb) {
		DRM_DEBUG_KMS("failed to alloc fb\n");
		return;
	}

	fb = &intel_fb->base;

	fb->dev = dev;

	val = I915_READ(PLANE_CTL(pipe, plane_id));

	/* The format field layout changed on gen11. */
	if (INTEL_GEN(dev_priv) >= 11)
		pixel_format = val & ICL_PLANE_CTL_FORMAT_MASK;
	else
		pixel_format = val & PLANE_CTL_FORMAT_MASK;

	/* Alpha mode moved from PLANE_CTL to PLANE_COLOR_CTL on GLK/gen10+. */
	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv)) {
		alpha = I915_READ(PLANE_COLOR_CTL(pipe, plane_id));
		alpha &= PLANE_COLOR_ALPHA_MASK;
	} else {
		alpha = val & PLANE_CTL_ALPHA_MASK;
	}

	fourcc = skl_format_to_fourcc(pixel_format,
				      val & PLANE_CTL_ORDER_RGBX, alpha);
	fb->format = drm_format_info(fourcc);

	tiling = val & PLANE_CTL_TILED_MASK;
	switch (tiling) {
	case PLANE_CTL_TILED_LINEAR:
		fb->modifier = DRM_FORMAT_MOD_LINEAR;
		break;
	case PLANE_CTL_TILED_X:
		plane_config->tiling = I915_TILING_X;
		fb->modifier = I915_FORMAT_MOD_X_TILED;
		break;
	case PLANE_CTL_TILED_Y:
		plane_config->tiling = I915_TILING_Y;
		if (val & PLANE_CTL_RENDER_DECOMPRESSION_ENABLE)
			fb->modifier = I915_FORMAT_MOD_Y_TILED_CCS;
		else
			fb->modifier = I915_FORMAT_MOD_Y_TILED;
		break;
	case PLANE_CTL_TILED_YF:
		if (val & PLANE_CTL_RENDER_DECOMPRESSION_ENABLE)
			fb->modifier = I915_FORMAT_MOD_Yf_TILED_CCS;
		else
			fb->modifier = I915_FORMAT_MOD_Yf_TILED;
		break;
	default:
		MISSING_CASE(tiling);
		goto error;
	}

	/*
	 * DRM_MODE_ROTATE_ is counter clockwise, the hw field is clockwise,
	 * hence 90 and 270 swap when translating.
	 */
	switch (val & PLANE_CTL_ROTATE_MASK) {
	case PLANE_CTL_ROTATE_0:
		plane_config->rotation = DRM_MODE_ROTATE_0;
		break;
	case PLANE_CTL_ROTATE_90:
		plane_config->rotation = DRM_MODE_ROTATE_270;
		break;
	case PLANE_CTL_ROTATE_180:
		plane_config->rotation = DRM_MODE_ROTATE_180;
		break;
	case PLANE_CTL_ROTATE_270:
		plane_config->rotation = DRM_MODE_ROTATE_90;
		break;
	}

	if (INTEL_GEN(dev_priv) >= 10 &&
	    val & PLANE_CTL_FLIP_HORIZONTAL)
		plane_config->rotation |= DRM_MODE_REFLECT_X;

	base = I915_READ(PLANE_SURF(pipe, plane_id)) & 0xfffff000;
	plane_config->base = base;

	offset = I915_READ(PLANE_OFFSET(pipe, plane_id));

	val = I915_READ(PLANE_SIZE(pipe, plane_id));
	fb->height = ((val >> 16) & 0xfff) + 1;
	fb->width = ((val >> 0) & 0x1fff) + 1;

	/* Hw stride is in tile units; convert to bytes. */
	val = I915_READ(PLANE_STRIDE(pipe, plane_id));
	stride_mult = skl_plane_stride_mult(fb, 0, DRM_MODE_ROTATE_0);
	fb->pitches[0] = (val & 0x3ff) * stride_mult;

	aligned_height = intel_fb_align_height(fb, 0, fb->height);

	plane_config->size = fb->pitches[0] * aligned_height;

	DRM_DEBUG_KMS("%s/%s with fb: size=%dx%d@%d, offset=%x, pitch %d, size 0x%x\n",
		      crtc->base.name, plane->base.name, fb->width, fb->height,
		      fb->format->cpp[0] * 8, base, fb->pitches[0],
		      plane_config->size);

	plane_config->fb = intel_fb;
	return;

error:
	kfree(intel_fb);
}
9917
/*
 * Read back the ILK-era panel fitter state for @crtc's pipe. On gen7 the
 * fitter has a pipe-select field; sanity-check that the fitter is bound
 * to the pipe we expect.
 */
static void ironlake_get_pfit_config(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	u32 tmp;

	tmp = I915_READ(PF_CTL(crtc->pipe));

	if (tmp & PF_ENABLE) {
		pipe_config->pch_pfit.enabled = true;
		pipe_config->pch_pfit.pos = I915_READ(PF_WIN_POS(crtc->pipe));
		pipe_config->pch_pfit.size = I915_READ(PF_WIN_SZ(crtc->pipe));

		/*
		 * IVB: the panel fitter is expected to be hardwired per
		 * pipe; warn if the select field disagrees.
		 */
		if (IS_GEN(dev_priv, 7)) {
			WARN_ON((tmp & PF_PIPE_SEL_MASK_IVB) !=
				PF_PIPE_SEL_IVB(crtc->pipe));
		}
	}
}
9941
/*
 * ironlake_get_pipe_config - read out the hw state of an ILK pipe
 * @crtc: crtc to read out
 * @pipe_config: state structure to fill in
 *
 * Returns true iff the pipe is enabled. Acquires a display power wakeref
 * for the pipe's domain across the readout. For pipes driving a PCH
 * transcoder, also reads the FDI configuration and identifies the shared
 * PCH DPLL in use.
 */
static bool ironlake_get_pipe_config(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	u32 tmp;
	bool ret;

	power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;
	pipe_config->shared_dpll = NULL;

	ret = false;
	tmp = I915_READ(PIPECONF(crtc->pipe));
	if (!(tmp & PIPECONF_ENABLE))
		goto out;

	switch (tmp & PIPECONF_BPC_MASK) {
	case PIPECONF_6BPC:
		pipe_config->pipe_bpp = 18;
		break;
	case PIPECONF_8BPC:
		pipe_config->pipe_bpp = 24;
		break;
	case PIPECONF_10BPC:
		pipe_config->pipe_bpp = 30;
		break;
	case PIPECONF_12BPC:
		pipe_config->pipe_bpp = 36;
		break;
	default:
		break;
	}

	if (tmp & PIPECONF_COLOR_RANGE_SELECT)
		pipe_config->limited_color_range = true;

	pipe_config->gamma_mode = (tmp & PIPECONF_GAMMA_MODE_MASK_ILK) >>
		PIPECONF_GAMMA_MODE_SHIFT;

	pipe_config->csc_mode = I915_READ(PIPE_CSC_MODE(crtc->pipe));

	i9xx_get_pipe_color_config(pipe_config);
	intel_color_get_config(pipe_config);

	if (I915_READ(PCH_TRANSCONF(crtc->pipe)) & TRANS_ENABLE) {
		struct intel_shared_dpll *pll;
		enum intel_dpll_id pll_id;

		pipe_config->has_pch_encoder = true;

		tmp = I915_READ(FDI_RX_CTL(crtc->pipe));
		pipe_config->fdi_lanes = ((FDI_DP_PORT_WIDTH_MASK & tmp) >>
					  FDI_DP_PORT_WIDTH_SHIFT) + 1;

		ironlake_get_fdi_m_n_config(crtc, pipe_config);

		if (HAS_PCH_IBX(dev_priv)) {
			/* IBX PCH DPLLs are hardwired one per pipe. */
			pll_id = (enum intel_dpll_id) crtc->pipe;
		} else {
			tmp = I915_READ(PCH_DPLL_SEL);
			if (tmp & TRANS_DPLLB_SEL(crtc->pipe))
				pll_id = DPLL_ID_PCH_PLL_B;
			else
				pll_id= DPLL_ID_PCH_PLL_A;
		}

		pipe_config->shared_dpll =
			intel_get_shared_dpll_by_id(dev_priv, pll_id);
		pll = pipe_config->shared_dpll;

		WARN_ON(!pll->info->funcs->get_hw_state(dev_priv, pll,
							&pipe_config->dpll_hw_state));

		tmp = pipe_config->dpll_hw_state.dpll;
		pipe_config->pixel_multiplier =
			((tmp & PLL_REF_SDVO_HDMI_MULTIPLIER_MASK)
			 >> PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT) + 1;

		ironlake_pch_clock_get(crtc, pipe_config);
	} else {
		pipe_config->pixel_multiplier = 1;
	}

	intel_get_pipe_timings(crtc, pipe_config);
	intel_get_pipe_src_size(crtc, pipe_config);

	ironlake_get_pfit_config(crtc, pipe_config);

	ret = true;

out:
	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
/*
 * Reserve the shared DPLL(s) needed by @crtc_state for a modeset.
 *
 * Pre-gen11 DSI uses its own dedicated PLL, so nothing is reserved for
 * it here; every other output (and all gen11+ outputs) must be able to
 * grab a shared DPLL, otherwise the modeset fails.
 *
 * Returns 0 on success, -EINVAL if no suitable PLL could be reserved.
 */
static int haswell_crtc_compute_clock(struct intel_crtc *crtc,
				      struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(crtc_state->base.state);

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) ||
	    INTEL_GEN(dev_priv) >= 11) {
		struct intel_encoder *encoder =
			intel_get_crtc_new_encoder(state, crtc_state);

		if (!intel_reserve_shared_dplls(state, crtc, encoder)) {
			DRM_DEBUG_KMS("failed to find PLL for pipe %c\n",
				      pipe_name(crtc->pipe));
			return -EINVAL;
		}
	}

	return 0;
}
10070
/*
 * Determine which shared DPLL is driving @port on CNL by decoding the
 * per-port clock select field in DPCLKA_CFGCR0, and record it in
 * @pipe_config. Bails with a WARN on an out-of-range PLL id.
 */
static void cannonlake_get_ddi_pll(struct drm_i915_private *dev_priv,
				   enum port port,
				   struct intel_crtc_state *pipe_config)
{
	enum intel_dpll_id id;
	u32 temp;

	temp = I915_READ(DPCLKA_CFGCR0) & DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
	id = temp >> DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(port);

	if (WARN_ON(id < SKL_DPLL0 || id > SKL_DPLL2))
		return;

	pipe_config->shared_dpll = intel_get_shared_dpll_by_id(dev_priv, id);
}
10086
/*
 * Determine which DPLL is driving @port on ICL+ and record it in the
 * per-port PLL slot of @pipe_config.
 *
 * Combo PHY ports get their PLL from ICL_DPCLKA_CFGCR0; Type-C ports
 * use either the MG PHY PLL of that TC port or the TBT PLL, depending
 * on the DDI_CLK_SEL setting.
 */
static void icelake_get_ddi_pll(struct drm_i915_private *dev_priv,
				enum port port,
				struct intel_crtc_state *pipe_config)
{
	enum phy phy = intel_port_to_phy(dev_priv, port);
	enum icl_port_dpll_id port_dpll_id;
	enum intel_dpll_id id;
	u32 temp;

	if (intel_phy_is_combo(dev_priv, phy)) {
		temp = I915_READ(ICL_DPCLKA_CFGCR0) &
		       ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
		id = temp >> ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy);
		port_dpll_id = ICL_PORT_DPLL_DEFAULT;
	} else if (intel_phy_is_tc(dev_priv, phy)) {
		u32 clk_sel = I915_READ(DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;

		if (clk_sel == DDI_CLK_SEL_MG) {
			id = icl_tc_port_to_pll_id(intel_port_to_tc(dev_priv,
								    port));
			port_dpll_id = ICL_PORT_DPLL_MG_PHY;
		} else {
			/* Anything else must be one of the TBT selections. */
			WARN_ON(clk_sel < DDI_CLK_SEL_TBT_162);
			id = DPLL_ID_ICL_TBTPLL;
			port_dpll_id = ICL_PORT_DPLL_DEFAULT;
		}
	} else {
		WARN(1, "Invalid port %x\n", port);
		return;
	}

	pipe_config->icl_port_dplls[port_dpll_id].pll =
		intel_get_shared_dpll_by_id(dev_priv, id);

	icl_set_active_port_dpll(pipe_config, port_dpll_id);
}
10123
10124 static void bxt_get_ddi_pll(struct drm_i915_private *dev_priv,
10125 enum port port,
10126 struct intel_crtc_state *pipe_config)
10127 {
10128 enum intel_dpll_id id;
10129
10130 switch (port) {
10131 case PORT_A:
10132 id = DPLL_ID_SKL_DPLL0;
10133 break;
10134 case PORT_B:
10135 id = DPLL_ID_SKL_DPLL1;
10136 break;
10137 case PORT_C:
10138 id = DPLL_ID_SKL_DPLL2;
10139 break;
10140 default:
10141 DRM_ERROR("Incorrect port type\n");
10142 return;
10143 }
10144
10145 pipe_config->shared_dpll = intel_get_shared_dpll_by_id(dev_priv, id);
10146 }
10147
/*
 * Determine which shared DPLL is driving @port on SKL/KBL by decoding
 * the per-port clock select field of DPLL_CTRL2 (3 bits per port, with
 * the select value starting at bit port*3+1), and record it in
 * @pipe_config. Bails with a WARN on an out-of-range PLL id.
 */
static void skylake_get_ddi_pll(struct drm_i915_private *dev_priv,
				enum port port,
				struct intel_crtc_state *pipe_config)
{
	enum intel_dpll_id id;
	u32 temp;

	temp = I915_READ(DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_SEL_MASK(port);
	id = temp >> (port * 3 + 1);

	if (WARN_ON(id < SKL_DPLL0 || id > SKL_DPLL3))
		return;

	pipe_config->shared_dpll = intel_get_shared_dpll_by_id(dev_priv, id);
}
10163
/*
 * Determine which PLL is driving @port on HSW/BDW by decoding the
 * PORT_CLK_SEL register, and record it in @pipe_config. A port with no
 * clock selected (or an unknown selection) leaves shared_dpll untouched.
 */
static void haswell_get_ddi_pll(struct drm_i915_private *dev_priv,
				enum port port,
				struct intel_crtc_state *pipe_config)
{
	enum intel_dpll_id id;
	u32 ddi_pll_sel = I915_READ(PORT_CLK_SEL(port));

	switch (ddi_pll_sel) {
	case PORT_CLK_SEL_WRPLL1:
		id = DPLL_ID_WRPLL1;
		break;
	case PORT_CLK_SEL_WRPLL2:
		id = DPLL_ID_WRPLL2;
		break;
	case PORT_CLK_SEL_SPLL:
		id = DPLL_ID_SPLL;
		break;
	case PORT_CLK_SEL_LCPLL_810:
		id = DPLL_ID_LCPLL_810;
		break;
	case PORT_CLK_SEL_LCPLL_1350:
		id = DPLL_ID_LCPLL_1350;
		break;
	case PORT_CLK_SEL_LCPLL_2700:
		id = DPLL_ID_LCPLL_2700;
		break;
	default:
		MISSING_CASE(ddi_pll_sel);
		/* fall through */
	case PORT_CLK_SEL_NONE:
		return;
	}

	pipe_config->shared_dpll = intel_get_shared_dpll_by_id(dev_priv, id);
}
10199
/*
 * Figure out which CPU transcoder feeds @crtc and whether that
 * transcoder's pipe is enabled.
 *
 * Handles the special panel transcoders (eDP on HSW-GEN10, DSI on
 * gen11+) which are not hardwired to a pipe. On success the transcoder
 * power domain reference is recorded in @wakerefs/@power_domain_mask
 * and left for the caller to release.
 *
 * Returns true if the pipe behind the resolved transcoder is enabled.
 */
static bool hsw_get_transcoder_state(struct intel_crtc *crtc,
				     struct intel_crtc_state *pipe_config,
				     u64 *power_domain_mask,
				     intel_wakeref_t *wakerefs)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	unsigned long panel_transcoder_mask = 0;
	unsigned long enabled_panel_transcoders = 0;
	enum transcoder panel_transcoder;
	intel_wakeref_t wf;
	u32 tmp;

	if (INTEL_GEN(dev_priv) >= 11)
		panel_transcoder_mask |=
			BIT(TRANSCODER_DSI_0) | BIT(TRANSCODER_DSI_1);

	if (HAS_TRANSCODER_EDP(dev_priv))
		panel_transcoder_mask |= BIT(TRANSCODER_EDP);

	/*
	 * The pipe->transcoder mapping is fixed, with the exception of the
	 * eDP and DSI transcoders handled below.
	 */
	pipe_config->cpu_transcoder = (enum transcoder) crtc->pipe;

	/*
	 * XXX: Do intel_display_power_get_if_enabled before reading this (for
	 * consistency and less surprising code; it's in always on power).
	 */
	for_each_set_bit(panel_transcoder,
			 &panel_transcoder_mask,
			 ARRAY_SIZE(INTEL_INFO(dev_priv)->trans_offsets)) {
		bool force_thru = false;
		enum pipe trans_pipe;

		tmp = I915_READ(TRANS_DDI_FUNC_CTL(panel_transcoder));
		if (!(tmp & TRANS_DDI_FUNC_ENABLE))
			continue;

		/*
		 * Log all enabled ones, only use the first one.
		 *
		 * FIXME: This won't work for two separate DSI displays.
		 */
		enabled_panel_transcoders |= BIT(panel_transcoder);
		if (enabled_panel_transcoders != BIT(panel_transcoder))
			continue;

		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		default:
			WARN(1, "unknown pipe linked to transcoder %s\n",
			     transcoder_name(panel_transcoder));
			/* fall through */
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
			force_thru = true;
			/* fall through */
		case TRANS_DDI_EDP_INPUT_A_ON:
			trans_pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			trans_pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			trans_pipe = PIPE_C;
			break;
		}

		if (trans_pipe == crtc->pipe) {
			pipe_config->cpu_transcoder = panel_transcoder;
			pipe_config->pch_pfit.force_thru = force_thru;
		}
	}

	/*
	 * Valid combos: none, eDP, DSI0, DSI1, DSI0+DSI1
	 */
	WARN_ON((enabled_panel_transcoders & BIT(TRANSCODER_EDP)) &&
		enabled_panel_transcoders != BIT(TRANSCODER_EDP));

	power_domain = POWER_DOMAIN_TRANSCODER(pipe_config->cpu_transcoder);
	WARN_ON(*power_domain_mask & BIT_ULL(power_domain));

	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wf)
		return false;

	wakerefs[power_domain] = wf;
	*power_domain_mask |= BIT_ULL(power_domain);

	tmp = I915_READ(PIPECONF(pipe_config->cpu_transcoder));

	return tmp & PIPECONF_ENABLE;
}
10295
/*
 * Check whether one of the BXT DSI transcoders is driving @crtc and, if
 * so, record it as the CPU transcoder in @pipe_config.
 *
 * Any transcoder power domain reference taken here is recorded in
 * @wakerefs/@power_domain_mask for the caller to release.
 *
 * Returns true when a DSI transcoder was found for this pipe.
 */
static bool bxt_get_dsi_transcoder_state(struct intel_crtc *crtc,
					 struct intel_crtc_state *pipe_config,
					 u64 *power_domain_mask,
					 intel_wakeref_t *wakerefs)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum intel_display_power_domain power_domain;
	enum transcoder cpu_transcoder;
	intel_wakeref_t wf;
	enum port port;
	u32 tmp;

	for_each_port_masked(port, BIT(PORT_A) | BIT(PORT_C)) {
		if (port == PORT_A)
			cpu_transcoder = TRANSCODER_DSI_A;
		else
			cpu_transcoder = TRANSCODER_DSI_C;

		power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
		WARN_ON(*power_domain_mask & BIT_ULL(power_domain));

		wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
		if (!wf)
			continue;

		wakerefs[power_domain] = wf;
		*power_domain_mask |= BIT_ULL(power_domain);

		/*
		 * The PLL needs to be enabled with a valid divider
		 * configuration, otherwise accessing DSI registers will hang
		 * the machine. See BSpec North Display Engine
		 * registers/MIPI[BXT]. We can break out here early, since we
		 * need the same DSI PLL to be enabled for both DSI ports.
		 */
		if (!bxt_dsi_pll_is_enabled(dev_priv))
			break;

		/* XXX: this works for video mode only */
		tmp = I915_READ(BXT_MIPI_PORT_CTRL(port));
		if (!(tmp & DPI_ENABLE))
			continue;

		tmp = I915_READ(MIPI_CTRL(port));
		if ((tmp & BXT_PIPE_SELECT_MASK) != BXT_PIPE_SELECT(crtc->pipe))
			continue;

		pipe_config->cpu_transcoder = cpu_transcoder;
		break;
	}

	return transcoder_is_dsi(pipe_config->cpu_transcoder);
}
10350
/*
 * Read out the DDI port related state for @crtc: which port the
 * transcoder is driving, which PLL feeds that port (per-platform
 * lookup), the PLL's hardware state, and any PCH/FDI configuration on
 * HSW/BDW's LPT PCH.
 */
static void haswell_get_ddi_port_state(struct intel_crtc *crtc,
				       struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_shared_dpll *pll;
	enum port port;
	u32 tmp;

	tmp = I915_READ(TRANS_DDI_FUNC_CTL(pipe_config->cpu_transcoder));

	/* TGL moved/widened the DDI select field in TRANS_DDI_FUNC_CTL. */
	if (INTEL_GEN(dev_priv) >= 12)
		port = TGL_TRANS_DDI_FUNC_CTL_VAL_TO_PORT(tmp);
	else
		port = TRANS_DDI_FUNC_CTL_VAL_TO_PORT(tmp);

	if (INTEL_GEN(dev_priv) >= 11)
		icelake_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_CANNONLAKE(dev_priv))
		cannonlake_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_GEN9_BC(dev_priv))
		skylake_get_ddi_pll(dev_priv, port, pipe_config);
	else if (IS_GEN9_LP(dev_priv))
		bxt_get_ddi_pll(dev_priv, port, pipe_config);
	else
		haswell_get_ddi_pll(dev_priv, port, pipe_config);

	pll = pipe_config->shared_dpll;
	if (pll) {
		WARN_ON(!pll->info->funcs->get_hw_state(dev_priv, pll,
							&pipe_config->dpll_hw_state));
	}

	/*
	 * Haswell has only FDI/PCH transcoder A. It is which is connected to
	 * DDI E. So just check whether this pipe is wired to DDI E and whether
	 * the PCH transcoder is on.
	 */
	if (INTEL_GEN(dev_priv) < 9 &&
	    (port == PORT_E) && I915_READ(LPT_TRANSCONF) & TRANS_ENABLE) {
		pipe_config->has_pch_encoder = true;

		tmp = I915_READ(FDI_RX_CTL(PIPE_A));
		pipe_config->fdi_lanes = ((FDI_DP_PORT_WIDTH_MASK & tmp) >>
					  FDI_DP_PORT_WIDTH_SHIFT) + 1;

		ironlake_get_fdi_m_n_config(crtc, pipe_config);
	}
}
10399
/*
 * Read out the complete hardware state of a pipe on HSW+.
 *
 * Power domain references are taken only while enabled and tracked in
 * @wakerefs/@power_domain_mask; all of them are released before
 * returning. Returns true if the pipe is active.
 */
static bool haswell_get_pipe_config(struct intel_crtc *crtc,
				    struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	intel_wakeref_t wakerefs[POWER_DOMAIN_NUM], wf;
	enum intel_display_power_domain power_domain;
	u64 power_domain_mask;
	bool active;

	intel_crtc_init_scalers(crtc, pipe_config);

	power_domain = POWER_DOMAIN_PIPE(crtc->pipe);
	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wf)
		return false;

	wakerefs[power_domain] = wf;
	power_domain_mask = BIT_ULL(power_domain);

	pipe_config->shared_dpll = NULL;

	active = hsw_get_transcoder_state(crtc, pipe_config,
					  &power_domain_mask, wakerefs);

	/* A DSI transcoder may drive the pipe even if the above found none. */
	if (IS_GEN9_LP(dev_priv) &&
	    bxt_get_dsi_transcoder_state(crtc, pipe_config,
					 &power_domain_mask, wakerefs)) {
		WARN_ON(active);
		active = true;
	}

	if (!active)
		goto out;

	if (!transcoder_is_dsi(pipe_config->cpu_transcoder) ||
	    INTEL_GEN(dev_priv) >= 11) {
		haswell_get_ddi_port_state(crtc, pipe_config);
		intel_get_pipe_timings(crtc, pipe_config);
	}

	intel_get_pipe_src_size(crtc, pipe_config);
	intel_get_crtc_ycbcr_config(crtc, pipe_config);

	pipe_config->gamma_mode = I915_READ(GAMMA_MODE(crtc->pipe));

	pipe_config->csc_mode = I915_READ(PIPE_CSC_MODE(crtc->pipe));

	if (INTEL_GEN(dev_priv) >= 9) {
		u32 tmp = I915_READ(SKL_BOTTOM_COLOR(crtc->pipe));

		if (tmp & SKL_BOTTOM_COLOR_GAMMA_ENABLE)
			pipe_config->gamma_enable = true;

		if (tmp & SKL_BOTTOM_COLOR_CSC_ENABLE)
			pipe_config->csc_enable = true;
	} else {
		i9xx_get_pipe_color_config(pipe_config);
	}

	intel_color_get_config(pipe_config);

	power_domain = POWER_DOMAIN_PIPE_PANEL_FITTER(crtc->pipe);
	WARN_ON(power_domain_mask & BIT_ULL(power_domain));

	wf = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (wf) {
		wakerefs[power_domain] = wf;
		power_domain_mask |= BIT_ULL(power_domain);

		if (INTEL_GEN(dev_priv) >= 9)
			skylake_get_pfit_config(crtc, pipe_config);
		else
			ironlake_get_pfit_config(crtc, pipe_config);
	}

	if (hsw_crtc_supports_ips(crtc)) {
		if (IS_HASWELL(dev_priv))
			pipe_config->ips_enabled = I915_READ(IPS_CTL) & IPS_ENABLE;
		else {
			/*
			 * We cannot readout IPS state on broadwell, set to
			 * true so we can set it to a defined state on first
			 * commit.
			 */
			pipe_config->ips_enabled = true;
		}
	}

	if (pipe_config->cpu_transcoder != TRANSCODER_EDP &&
	    !transcoder_is_dsi(pipe_config->cpu_transcoder)) {
		pipe_config->pixel_multiplier =
			I915_READ(PIPE_MULT(pipe_config->cpu_transcoder)) + 1;
	} else {
		pipe_config->pixel_multiplier = 1;
	}

out:
	for_each_power_domain(power_domain, power_domain_mask)
		intel_display_power_put(dev_priv,
					power_domain, wakerefs[power_domain]);

	return active;
}
10503
/*
 * Compute the value to write into the CURBASE register for
 * @plane_state: physical address on platforms where the cursor needs
 * physical memory, GGTT offset otherwise, plus the surface offset.
 */
static u32 intel_cursor_base(const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	const struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	u32 base;

	if (INTEL_INFO(dev_priv)->display.cursor_needs_physical)
		base = sg_dma_address(obj->mm.pages->sgl);
	else
		base = intel_plane_ggtt_offset(plane_state);

	base += plane_state->color_plane[0].offset;

	/*
	 * On GMCH platforms 180 degree rotation is done by pointing
	 * the base at the last pixel; later hardware handles rotation
	 * itself.
	 */
	if (HAS_GMCH(dev_priv) &&
	    plane_state->base.rotation & DRM_MODE_ROTATE_180)
		base += (plane_state->base.crtc_h *
			 plane_state->base.crtc_w - 1) * fb->format->cpp[0];

	return base;
}
10527
10528 static u32 intel_cursor_position(const struct intel_plane_state *plane_state)
10529 {
10530 int x = plane_state->base.crtc_x;
10531 int y = plane_state->base.crtc_y;
10532 u32 pos = 0;
10533
10534 if (x < 0) {
10535 pos |= CURSOR_POS_SIGN << CURSOR_X_SHIFT;
10536 x = -x;
10537 }
10538 pos |= x << CURSOR_X_SHIFT;
10539
10540 if (y < 0) {
10541 pos |= CURSOR_POS_SIGN << CURSOR_Y_SHIFT;
10542 y = -y;
10543 }
10544 pos |= y << CURSOR_Y_SHIFT;
10545
10546 return pos;
10547 }
10548
10549 static bool intel_cursor_size_ok(const struct intel_plane_state *plane_state)
10550 {
10551 const struct drm_mode_config *config =
10552 &plane_state->base.plane->dev->mode_config;
10553 int width = plane_state->base.crtc_w;
10554 int height = plane_state->base.crtc_h;
10555
10556 return width > 0 && width <= config->cursor_width &&
10557 height > 0 && height <= config->cursor_height;
10558 }
10559
/*
 * Compute and validate the cursor surface offset.
 *
 * After pinning via intel_plane_compute_gtt(), the aligned surface
 * offset must consume the whole src x/y — cursors cannot pan within
 * the surface. Returns 0 on success or a negative errno.
 */
static int intel_cursor_check_surface(struct intel_plane_state *plane_state)
{
	int src_x, src_y;
	u32 offset;
	int ret;

	ret = intel_plane_compute_gtt(plane_state);
	if (ret)
		return ret;

	if (!plane_state->base.visible)
		return 0;

	/* src coordinates are 16.16 fixed point */
	src_x = plane_state->base.src_x >> 16;
	src_y = plane_state->base.src_y >> 16;

	intel_add_fb_offsets(&src_x, &src_y, plane_state, 0);
	offset = intel_plane_compute_aligned_offset(&src_x, &src_y,
						    plane_state, 0);

	if (src_x != 0 || src_y != 0) {
		DRM_DEBUG_KMS("Arbitrary cursor panning not supported\n");
		return -EINVAL;
	}

	plane_state->color_plane[0].offset = offset;

	return 0;
}
10589
/*
 * Platform-independent cursor plane checks: linear fb only, no
 * scaling, valid surface offset and src coordinates. Called from the
 * per-platform i845/i9xx check functions.
 */
static int intel_check_cursor(struct intel_crtc_state *crtc_state,
			      struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int ret;

	if (fb && fb->modifier != DRM_FORMAT_MOD_LINEAR) {
		DRM_DEBUG_KMS("cursor cannot be tiled\n");
		return -EINVAL;
	}

	ret = drm_atomic_helper_check_plane_state(&plane_state->base,
						  &crtc_state->base,
						  DRM_PLANE_HELPER_NO_SCALING,
						  DRM_PLANE_HELPER_NO_SCALING,
						  true, true);
	if (ret)
		return ret;

	ret = intel_cursor_check_surface(plane_state);
	if (ret)
		return ret;

	if (!plane_state->base.visible)
		return 0;

	ret = intel_plane_check_src_coordinates(plane_state);
	if (ret)
		return ret;

	return 0;
}
10622
/* Maximum cursor stride on 845G/865G is a fixed 2 KiB. */
static unsigned int
i845_cursor_max_stride(struct intel_plane *plane,
		       u32 pixel_format, u64 modifier,
		       unsigned int rotation)
{
	return 2048;
}
10630
10631 static u32 i845_cursor_ctl_crtc(const struct intel_crtc_state *crtc_state)
10632 {
10633 u32 cntl = 0;
10634
10635 if (crtc_state->gamma_enable)
10636 cntl |= CURSOR_GAMMA_ENABLE;
10637
10638 return cntl;
10639 }
10640
/*
 * Compute the plane-state dependent bits of the 845G cursor control
 * register: enable, ARGB format, and the fb stride.
 */
static u32 i845_cursor_ctl(const struct intel_crtc_state *crtc_state,
			   const struct intel_plane_state *plane_state)
{
	return CURSOR_ENABLE |
	       CURSOR_FORMAT_ARGB |
	       CURSOR_STRIDE(plane_state->color_plane[0].stride);
}
10648
static bool i845_cursor_size_ok(const struct intel_plane_state *plane_state)
{
	int width = plane_state->base.crtc_w;

	/*
	 * 845G/865G only constrain the width (it must be a multiple of
	 * 64 pixels); the height is arbitrary within the generic limits.
	 */
	return intel_cursor_size_ok(plane_state) && IS_ALIGNED(width, 64);
}
10659
/*
 * Validate a cursor plane update on 845G/865G and precompute the
 * cursor control register value. Returns 0 on success or -EINVAL.
 */
static int i845_check_cursor(struct intel_crtc_state *crtc_state,
			     struct intel_plane_state *plane_state)
{
	const struct drm_framebuffer *fb = plane_state->base.fb;
	int ret;

	ret = intel_check_cursor(crtc_state, plane_state);
	if (ret)
		return ret;

	/* if we want to turn off the cursor ignore width and height */
	if (!fb)
		return 0;

	/* Check for which cursor types we support */
	if (!i845_cursor_size_ok(plane_state)) {
		DRM_DEBUG("Cursor dimension %dx%d not supported\n",
			  plane_state->base.crtc_w,
			  plane_state->base.crtc_h);
		return -EINVAL;
	}

	WARN_ON(plane_state->base.visible &&
		plane_state->color_plane[0].stride != fb->pitches[0]);

	/* The hardware only accepts these power-of-two strides. */
	switch (fb->pitches[0]) {
	case 256:
	case 512:
	case 1024:
	case 2048:
		break;
	default:
		DRM_DEBUG_KMS("Invalid cursor stride (%u)\n",
			      fb->pitches[0]);
		return -EINVAL;
	}

	plane_state->ctl = i845_cursor_ctl(crtc_state, plane_state);

	return 0;
}
10701
/*
 * Program the 845G/865G cursor registers for @plane_state, or disable
 * the cursor when @plane_state is NULL / not visible.
 */
static void i845_update_cursor(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state,
			       const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	u32 cntl = 0, base = 0, pos = 0, size = 0;
	unsigned long irqflags;

	if (plane_state && plane_state->base.visible) {
		unsigned int width = plane_state->base.crtc_w;
		unsigned int height = plane_state->base.crtc_h;

		cntl = plane_state->ctl |
			i845_cursor_ctl_crtc(crtc_state);

		size = (height << 12) | width;

		base = intel_cursor_base(plane_state);
		pos = intel_cursor_position(plane_state);
	}

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	/*
	 * On these chipsets we can only modify the base/size/stride
	 * whilst the cursor is disabled, hence the disable first,
	 * reprogram, then re-enable sequence below.
	 */
	if (plane->cursor.base != base ||
	    plane->cursor.size != size ||
	    plane->cursor.cntl != cntl) {
		I915_WRITE_FW(CURCNTR(PIPE_A), 0);
		I915_WRITE_FW(CURBASE(PIPE_A), base);
		I915_WRITE_FW(CURSIZE, size);
		I915_WRITE_FW(CURPOS(PIPE_A), pos);
		I915_WRITE_FW(CURCNTR(PIPE_A), cntl);

		plane->cursor.base = base;
		plane->cursor.size = size;
		plane->cursor.cntl = cntl;
	} else {
		/* Only the position changed; no disable cycle needed. */
		I915_WRITE_FW(CURPOS(PIPE_A), pos);
	}

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
10746
/* Disable the 845G cursor by programming an all-zero state. */
static void i845_disable_cursor(struct intel_plane *plane,
				const struct intel_crtc_state *crtc_state)
{
	i845_update_cursor(plane, crtc_state, NULL);
}
10752
/*
 * Read out whether the 845G cursor plane is enabled. The cursor is
 * hardwired to pipe A on this platform.
 */
static bool i845_cursor_get_hw_state(struct intel_plane *plane,
				     enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	bool ret;

	power_domain = POWER_DOMAIN_PIPE(PIPE_A);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	ret = I915_READ(CURCNTR(PIPE_A)) & CURSOR_ENABLE;

	*pipe = PIPE_A;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
10774
/* i9xx cursors are always packed 32bpp at the maximum cursor width. */
static unsigned int
i9xx_cursor_max_stride(struct intel_plane *plane,
		       u32 pixel_format, u64 modifier,
		       unsigned int rotation)
{
	return plane->base.dev->mode_config.cursor_width * 4;
}
10782
10783 static u32 i9xx_cursor_ctl_crtc(const struct intel_crtc_state *crtc_state)
10784 {
10785 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
10786 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
10787 u32 cntl = 0;
10788
10789 if (INTEL_GEN(dev_priv) >= 11)
10790 return cntl;
10791
10792 if (crtc_state->gamma_enable)
10793 cntl = MCURSOR_GAMMA_ENABLE;
10794
10795 if (crtc_state->csc_enable)
10796 cntl |= MCURSOR_PIPE_CSC_ENABLE;
10797
10798 if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
10799 cntl |= MCURSOR_PIPE_SELECT(crtc->pipe);
10800
10801 return cntl;
10802 }
10803
/*
 * Compute the plane-state dependent bits of the i9xx cursor control
 * register: cursor mode (by width), trickle feed, and rotation.
 * Returns 0 for an unsupported width (already rejected by the check
 * phase, so this is just belt and braces).
 */
static u32 i9xx_cursor_ctl(const struct intel_crtc_state *crtc_state,
			   const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	u32 cntl = 0;

	if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
		cntl |= MCURSOR_TRICKLE_FEED_DISABLE;

	switch (plane_state->base.crtc_w) {
	case 64:
		cntl |= MCURSOR_MODE_64_ARGB_AX;
		break;
	case 128:
		cntl |= MCURSOR_MODE_128_ARGB_AX;
		break;
	case 256:
		cntl |= MCURSOR_MODE_256_ARGB_AX;
		break;
	default:
		MISSING_CASE(plane_state->base.crtc_w);
		return 0;
	}

	if (plane_state->base.rotation & DRM_MODE_ROTATE_180)
		cntl |= MCURSOR_ROTATE_180;

	return cntl;
}
10834
static bool i9xx_cursor_size_ok(const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(plane_state->base.plane->dev);
	int width = plane_state->base.crtc_w;
	int height = plane_state->base.crtc_h;

	if (!intel_cursor_size_ok(plane_state))
		return false;

	/* Cursor width is limited to a few power-of-two sizes */
	switch (width) {
	case 256:
	case 128:
	case 64:
		break;
	default:
		return false;
	}

	/*
	 * IVB+ have CUR_FBC_CTL which allows an arbitrary cursor
	 * height from 8 lines up to the cursor width, when the
	 * cursor is not rotated. Everything else requires square
	 * cursors.
	 */
	if (HAS_CUR_FBC(dev_priv) &&
	    plane_state->base.rotation & DRM_MODE_ROTATE_0) {
		if (height < 8 || height > width)
			return false;
	} else {
		if (height != width)
			return false;
	}

	return true;
}
10872
/*
 * Validate a cursor plane update on i9xx+ and precompute the cursor
 * control register value. Returns 0 on success or -EINVAL.
 */
static int i9xx_check_cursor(struct intel_crtc_state *crtc_state,
			     struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	enum pipe pipe = plane->pipe;
	int ret;

	ret = intel_check_cursor(crtc_state, plane_state);
	if (ret)
		return ret;

	/* if we want to turn off the cursor ignore width and height */
	if (!fb)
		return 0;

	/* Check for which cursor types we support */
	if (!i9xx_cursor_size_ok(plane_state)) {
		DRM_DEBUG("Cursor dimension %dx%d not supported\n",
			  plane_state->base.crtc_w,
			  plane_state->base.crtc_h);
		return -EINVAL;
	}

	WARN_ON(plane_state->base.visible &&
		plane_state->color_plane[0].stride != fb->pitches[0]);

	/* The stride must exactly match the visible cursor width. */
	if (fb->pitches[0] != plane_state->base.crtc_w * fb->format->cpp[0]) {
		DRM_DEBUG_KMS("Invalid cursor stride (%u) (cursor width %d)\n",
			      fb->pitches[0], plane_state->base.crtc_w);
		return -EINVAL;
	}

	/*
	 * There's something wrong with the cursor on CHV pipe C.
	 * If it straddles the left edge of the screen then
	 * moving it away from the edge or disabling it often
	 * results in a pipe underrun, and often that can lead to
	 * dead pipe (constant underrun reported, and it scans
	 * out nothing). So simply refuse to place the cursor
	 * in that position.
	 */
	if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_C &&
	    plane_state->base.visible && plane_state->base.crtc_x < 0) {
		DRM_DEBUG_KMS("CHV cursor C not allowed to straddle the left screen edge\n");
		return -EINVAL;
	}

	plane_state->ctl = i9xx_cursor_ctl(crtc_state, plane_state);

	return 0;
}
10927
/*
 * Program the i9xx+ cursor registers for @plane_state, or disable the
 * cursor when @plane_state is NULL / not visible.
 */
static void i9xx_update_cursor(struct intel_plane *plane,
			       const struct intel_crtc_state *crtc_state,
			       const struct intel_plane_state *plane_state)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum pipe pipe = plane->pipe;
	u32 cntl = 0, base = 0, pos = 0, fbc_ctl = 0;
	unsigned long irqflags;

	if (plane_state && plane_state->base.visible) {
		cntl = plane_state->ctl |
			i9xx_cursor_ctl_crtc(crtc_state);

		/* Non-square cursors need CUR_FBC_CTL to set the height. */
		if (plane_state->base.crtc_h != plane_state->base.crtc_w)
			fbc_ctl = CUR_FBC_CTL_EN | (plane_state->base.crtc_h - 1);

		base = intel_cursor_base(plane_state);
		pos = intel_cursor_position(plane_state);
	}

	spin_lock_irqsave(&dev_priv->uncore.lock, irqflags);

	/*
	 * Register write ordering matters here:
	 *
	 * On some platforms writing CURCNTR first will also cause CURPOS
	 * to be armed by the CURBASE write. Without the CURCNTR write the
	 * CURPOS write would arm itself. Thus we always write CURCNTR
	 * before CURPOS.
	 *
	 * On other platforms CURPOS always requires the CURBASE write to
	 * arm the update. Additionally a write to any of the cursor
	 * registers will cancel an already armed cursor update. Thus
	 * leaving out the CURBASE write after CURPOS could lead to a
	 * cursor that doesn't appear to move, or even change shape.
	 * Thus we always write CURBASE last.
	 *
	 * The other registers are armed by the CURBASE write except when
	 * the plane is getting enabled, at which time the CURCNTR write
	 * arms the update.
	 */

	if (INTEL_GEN(dev_priv) >= 9)
		skl_write_cursor_wm(plane, crtc_state);

	if (plane->cursor.base != base ||
	    plane->cursor.size != fbc_ctl ||
	    plane->cursor.cntl != cntl) {
		if (HAS_CUR_FBC(dev_priv))
			I915_WRITE_FW(CUR_FBC_CTL(pipe), fbc_ctl);
		I915_WRITE_FW(CURCNTR(pipe), cntl);
		I915_WRITE_FW(CURPOS(pipe), pos);
		I915_WRITE_FW(CURBASE(pipe), base);

		plane->cursor.base = base;
		plane->cursor.size = fbc_ctl;
		plane->cursor.cntl = cntl;
	} else {
		I915_WRITE_FW(CURPOS(pipe), pos);
		I915_WRITE_FW(CURBASE(pipe), base);
	}

	spin_unlock_irqrestore(&dev_priv->uncore.lock, irqflags);
}
10992
/* Disable the i9xx cursor by programming an all-zero state. */
static void i9xx_disable_cursor(struct intel_plane *plane,
				const struct intel_crtc_state *crtc_state)
{
	i9xx_update_cursor(plane, crtc_state, NULL);
}
10998
/*
 * Read out whether the i9xx cursor plane is enabled and which pipe it
 * is assigned to (pre-g4x cursors can be moved between pipes via the
 * pipe select bits).
 */
static bool i9xx_cursor_get_hw_state(struct intel_plane *plane,
				     enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
	enum intel_display_power_domain power_domain;
	intel_wakeref_t wakeref;
	bool ret;
	u32 val;

	/*
	 * Not 100% correct for planes that can move between pipes,
	 * but that's only the case for gen2-3 which don't have any
	 * display power wells.
	 */
	power_domain = POWER_DOMAIN_PIPE(plane->pipe);
	wakeref = intel_display_power_get_if_enabled(dev_priv, power_domain);
	if (!wakeref)
		return false;

	val = I915_READ(CURCNTR(plane->pipe));

	ret = val & MCURSOR_MODE;

	if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
		*pipe = plane->pipe;
	else
		*pipe = (val & MCURSOR_PIPE_SELECT_MASK) >>
			MCURSOR_PIPE_SELECT_SHIFT;

	intel_display_power_put(dev_priv, power_domain, wakeref);

	return ret;
}
11032
11033
/* VGA-style 640x480 mode used as a safe default for load detection. */
static const struct drm_display_mode load_detect_mode = {
	DRM_MODE("640x480", DRM_MODE_TYPE_DEFAULT, 31500, 640, 664,
		 704, 832, 0, 480, 489, 491, 520, 0, DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC),
};
11038
11039 struct drm_framebuffer *
11040 intel_framebuffer_create(struct drm_i915_gem_object *obj,
11041 struct drm_mode_fb_cmd2 *mode_cmd)
11042 {
11043 struct intel_framebuffer *intel_fb;
11044 int ret;
11045
11046 intel_fb = kzalloc(sizeof(*intel_fb), GFP_KERNEL);
11047 if (!intel_fb)
11048 return ERR_PTR(-ENOMEM);
11049
11050 ret = intel_framebuffer_init(intel_fb, obj, mode_cmd);
11051 if (ret)
11052 goto err;
11053
11054 return &intel_fb->base;
11055
11056 err:
11057 kfree(intel_fb);
11058 return ERR_PTR(ret);
11059 }
11060
/*
 * Add every plane on @crtc to @state and detach it (no crtc, no fb),
 * so that a subsequent commit disables all of them. Returns 0 on
 * success or a negative errno from the atomic helpers.
 */
static int intel_modeset_disable_planes(struct drm_atomic_state *state,
					struct drm_crtc *crtc)
{
	struct drm_plane *plane;
	struct drm_plane_state *plane_state;
	int ret, i;

	ret = drm_atomic_add_affected_planes(state, crtc);
	if (ret)
		return ret;

	for_each_new_plane_in_state(state, plane, plane_state, i) {
		if (plane_state->crtc != crtc)
			continue;

		ret = drm_atomic_set_crtc_for_plane(plane_state, NULL);
		if (ret)
			return ret;

		drm_atomic_set_fb_for_plane(plane_state, NULL);
	}

	return 0;
}
11085
/*
 * intel_get_load_detect_pipe - light up a pipe for analog load detection
 * @connector: connector to probe
 * @mode: mode to use, or NULL to use the default load_detect_mode
 * @old: receives the state needed by intel_release_load_detect_pipe()
 * @ctx: modeset acquire context held by the caller for the whole sequence
 *
 * Finds a CRTC (preferring one already bound to @connector, otherwise the
 * first unused one the encoder can drive) and commits an atomic state that
 * enables it with @mode and all planes disabled.  A snapshot of the state
 * being clobbered is stored in @old->restore_state.
 *
 * Returns nonzero (true) on success, 0 (false) on failure, or -EDEADLK when
 * the caller must back off and restart the locking sequence.
 */
int intel_get_load_detect_pipe(struct drm_connector *connector,
			       const struct drm_display_mode *mode,
			       struct intel_load_detect_pipe *old,
			       struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_crtc *intel_crtc;
	struct intel_encoder *intel_encoder =
		intel_attached_encoder(connector);
	struct drm_crtc *possible_crtc;
	struct drm_encoder *encoder = &intel_encoder->base;
	struct drm_crtc *crtc = NULL;
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_mode_config *config = &dev->mode_config;
	struct drm_atomic_state *state = NULL, *restore_state = NULL;
	struct drm_connector_state *connector_state;
	struct intel_crtc_state *crtc_state;
	int ret, i = -1;

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
		      connector->base.id, connector->name,
		      encoder->base.id, encoder->name);

	old->restore_state = NULL;

	WARN_ON(!drm_modeset_is_locked(&config->connection_mutex));

	/*
	 * Algorithm:
	 *
	 * - if the connector already has a crtc assigned, reuse it;
	 *
	 * - otherwise walk the encoder's possible_crtcs mask and grab the
	 *   first crtc that is currently disabled.
	 */

	/* See if we already have a CRTC for this connector */
	if (connector->state->crtc) {
		crtc = connector->state->crtc;

		ret = drm_modeset_lock(&crtc->mutex, ctx);
		if (ret)
			goto fail;

		/* Make sure the crtc and connector are running */
		goto found;
	}

	/* Find an unused crtc */
	for_each_crtc(dev, possible_crtc) {
		i++;
		if (!(encoder->possible_crtcs & (1 << i)))
			continue;

		ret = drm_modeset_lock(&possible_crtc->mutex, ctx);
		if (ret)
			goto fail;

		if (possible_crtc->state->enable) {
			/* already in use: drop the lock and keep looking */
			drm_modeset_unlock(&possible_crtc->mutex);
			continue;
		}

		crtc = possible_crtc;
		break;
	}

	/*
	 * If we didn't find an unused CRTC, load detection is not possible.
	 */
	if (!crtc) {
		DRM_DEBUG_KMS("no pipe available for load-detect\n");
		ret = -ENODEV;
		goto fail;
	}

found:
	intel_crtc = to_intel_crtc(crtc);

	state = drm_atomic_state_alloc(dev);
	restore_state = drm_atomic_state_alloc(dev);
	if (!state || !restore_state) {
		ret = -ENOMEM;
		goto fail;
	}

	state->acquire_ctx = ctx;
	restore_state->acquire_ctx = ctx;

	connector_state = drm_atomic_get_connector_state(state, connector);
	if (IS_ERR(connector_state)) {
		ret = PTR_ERR(connector_state);
		goto fail;
	}

	ret = drm_atomic_set_crtc_for_connector(connector_state, crtc);
	if (ret)
		goto fail;

	crtc_state = intel_atomic_get_crtc_state(state, intel_crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto fail;
	}

	crtc_state->base.active = crtc_state->base.enable = true;

	if (!mode)
		mode = &load_detect_mode;

	ret = drm_atomic_set_mode_for_crtc(&crtc_state->base, mode);
	if (ret)
		goto fail;

	/* load detection wants a bare pipe: no planes enabled */
	ret = intel_modeset_disable_planes(state, crtc);
	if (ret)
		goto fail;

	/* Snapshot the state we are about to clobber so it can be restored. */
	ret = PTR_ERR_OR_ZERO(drm_atomic_get_connector_state(restore_state, connector));
	if (!ret)
		ret = PTR_ERR_OR_ZERO(drm_atomic_get_crtc_state(restore_state, crtc));
	if (!ret)
		ret = drm_atomic_add_affected_planes(restore_state, crtc);
	if (ret) {
		DRM_DEBUG_KMS("Failed to create a copy of old state to restore: %i\n", ret);
		goto fail;
	}

	ret = drm_atomic_commit(state);
	if (ret) {
		DRM_DEBUG_KMS("failed to set mode on load-detect pipe\n");
		goto fail;
	}

	old->restore_state = restore_state;
	drm_atomic_state_put(state);

	/* let the connector get through one full cycle before testing */
	intel_wait_for_vblank(dev_priv, intel_crtc->pipe);
	return true;

fail:
	if (state) {
		drm_atomic_state_put(state);
		state = NULL;
	}
	if (restore_state) {
		drm_atomic_state_put(restore_state);
		restore_state = NULL;
	}

	/* -EDEADLK must be propagated so the caller restarts the locking */
	if (ret == -EDEADLK)
		return ret;

	return false;
}
11243
11244 void intel_release_load_detect_pipe(struct drm_connector *connector,
11245 struct intel_load_detect_pipe *old,
11246 struct drm_modeset_acquire_ctx *ctx)
11247 {
11248 struct intel_encoder *intel_encoder =
11249 intel_attached_encoder(connector);
11250 struct drm_encoder *encoder = &intel_encoder->base;
11251 struct drm_atomic_state *state = old->restore_state;
11252 int ret;
11253
11254 DRM_DEBUG_KMS("[CONNECTOR:%d:%s], [ENCODER:%d:%s]\n",
11255 connector->base.id, connector->name,
11256 encoder->base.id, encoder->name);
11257
11258 if (!state)
11259 return;
11260
11261 ret = drm_atomic_helper_commit_duplicated_state(state, ctx);
11262 if (ret)
11263 DRM_DEBUG_KMS("Couldn't release load detect pipe: %i\n", ret);
11264 drm_atomic_state_put(state);
11265 }
11266
11267 static int i9xx_pll_refclk(struct drm_device *dev,
11268 const struct intel_crtc_state *pipe_config)
11269 {
11270 struct drm_i915_private *dev_priv = to_i915(dev);
11271 u32 dpll = pipe_config->dpll_hw_state.dpll;
11272
11273 if ((dpll & PLL_REF_INPUT_MASK) == PLLB_REF_INPUT_SPREADSPECTRUMIN)
11274 return dev_priv->vbt.lvds_ssc_freq;
11275 else if (HAS_PCH_SPLIT(dev_priv))
11276 return 120000;
11277 else if (!IS_GEN(dev_priv, 2))
11278 return 96000;
11279 else
11280 return 48000;
11281 }
11282
11283
/*
 * Decode the read-out DPLL/FP register state for pre-DDI (i9xx-style)
 * hardware into divider values and compute the resulting port clock (kHz),
 * stored in pipe_config->port_clock.  Works from dpll_hw_state, so apart
 * from the gen2 LVDS register read it does not touch the hardware.
 */
static void i9xx_crtc_clock_get(struct intel_crtc *crtc,
				struct intel_crtc_state *pipe_config)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	int pipe = pipe_config->cpu_transcoder;
	u32 dpll = pipe_config->dpll_hw_state.dpll;
	u32 fp;
	struct dpll clock;
	int port_clock;
	int refclk = i9xx_pll_refclk(dev, pipe_config);

	/* the rate-select bit picks which FP register the PLL is using */
	if ((dpll & DISPLAY_RATE_SELECT_FPA1) == 0)
		fp = pipe_config->dpll_hw_state.fp0;
	else
		fp = pipe_config->dpll_hw_state.fp1;

	clock.m1 = (fp & FP_M1_DIV_MASK) >> FP_M1_DIV_SHIFT;
	if (IS_PINEVIEW(dev_priv)) {
		/* Pineview stores N one-hot encoded, hence the ffs() - 1 */
		clock.n = ffs((fp & FP_N_PINEVIEW_DIV_MASK) >> FP_N_DIV_SHIFT) - 1;
		clock.m2 = (fp & FP_M2_PINEVIEW_DIV_MASK) >> FP_M2_DIV_SHIFT;
	} else {
		clock.n = (fp & FP_N_DIV_MASK) >> FP_N_DIV_SHIFT;
		clock.m2 = (fp & FP_M2_DIV_MASK) >> FP_M2_DIV_SHIFT;
	}

	if (!IS_GEN(dev_priv, 2)) {
		/* P1 is also a one-hot field on gen3+ */
		if (IS_PINEVIEW(dev_priv))
			clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_PINEVIEW) >>
				DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW);
		else
			clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK) >>
			       DPLL_FPA01_P1_POST_DIV_SHIFT);

		/* P2 depends on the PLL operating mode */
		switch (dpll & DPLL_MODE_MASK) {
		case DPLLB_MODE_DAC_SERIAL:
			clock.p2 = dpll & DPLL_DAC_SERIAL_P2_CLOCK_DIV_5 ?
				5 : 10;
			break;
		case DPLLB_MODE_LVDS:
			clock.p2 = dpll & DPLLB_LVDS_P2_CLOCK_DIV_7 ?
				7 : 14;
			break;
		default:
			/* unknown mode: leave port_clock untouched */
			DRM_DEBUG_KMS("Unknown DPLL mode %08x in programmed "
				  "mode\n", (int)(dpll & DPLL_MODE_MASK));
			return;
		}

		if (IS_PINEVIEW(dev_priv))
			port_clock = pnv_calc_dpll_params(refclk, &clock);
		else
			port_clock = i9xx_calc_dpll_params(refclk, &clock);
	} else {
		/* gen2: LVDS configuration lives in the LVDS register */
		u32 lvds = IS_I830(dev_priv) ? 0 : I915_READ(LVDS);
		bool is_lvds = (pipe == 1) && (lvds & LVDS_PORT_EN);

		if (is_lvds) {
			clock.p1 = ffs((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830_LVDS) >>
				       DPLL_FPA01_P1_POST_DIV_SHIFT);

			if (lvds & LVDS_CLKB_POWER_UP)
				clock.p2 = 7;
			else
				clock.p2 = 14;
		} else {
			if (dpll & PLL_P1_DIVIDE_BY_TWO)
				clock.p1 = 2;
			else {
				clock.p1 = ((dpll & DPLL_FPA01_P1_POST_DIV_MASK_I830) >>
					    DPLL_FPA01_P1_POST_DIV_SHIFT) + 2;
			}
			if (dpll & PLL_P2_DIVIDE_BY_4)
				clock.p2 = 4;
			else
				clock.p2 = 2;
		}

		port_clock = i9xx_calc_dpll_params(refclk, &clock);
	}

	/*
	 * NOTE(review): port_clock here still includes the pixel multiplier;
	 * adjusted_mode.crtc_clock is derived from it later in the encoder's
	 * get_config() — confirm against the encoder implementations.
	 */
	pipe_config->port_clock = port_clock;
}
11372
11373 int intel_dotclock_calculate(int link_freq,
11374 const struct intel_link_m_n *m_n)
11375 {
11376
11377
11378
11379
11380
11381
11382
11383
11384
11385
11386 if (!m_n->link_n)
11387 return 0;
11388
11389 return div_u64(mul_u32_u32(m_n->link_m, link_freq), m_n->link_n);
11390 }
11391
11392 static void ironlake_pch_clock_get(struct intel_crtc *crtc,
11393 struct intel_crtc_state *pipe_config)
11394 {
11395 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
11396
11397
11398 i9xx_crtc_clock_get(crtc, pipe_config);
11399
11400
11401
11402
11403
11404
11405 pipe_config->base.adjusted_mode.crtc_clock =
11406 intel_dotclock_calculate(intel_fdi_link_freq(dev_priv, pipe_config),
11407 &pipe_config->fdi_m_n);
11408 }
11409
11410
11411 struct drm_display_mode *
11412 intel_encoder_current_mode(struct intel_encoder *encoder)
11413 {
11414 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
11415 struct intel_crtc_state *crtc_state;
11416 struct drm_display_mode *mode;
11417 struct intel_crtc *crtc;
11418 enum pipe pipe;
11419
11420 if (!encoder->get_hw_state(encoder, &pipe))
11421 return NULL;
11422
11423 crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
11424
11425 mode = kzalloc(sizeof(*mode), GFP_KERNEL);
11426 if (!mode)
11427 return NULL;
11428
11429 crtc_state = kzalloc(sizeof(*crtc_state), GFP_KERNEL);
11430 if (!crtc_state) {
11431 kfree(mode);
11432 return NULL;
11433 }
11434
11435 crtc_state->base.crtc = &crtc->base;
11436
11437 if (!dev_priv->display.get_pipe_config(crtc, crtc_state)) {
11438 kfree(crtc_state);
11439 kfree(mode);
11440 return NULL;
11441 }
11442
11443 encoder->get_config(encoder, crtc_state);
11444
11445 intel_mode_from_pipe_config(mode, crtc_state);
11446
11447 kfree(crtc_state);
11448
11449 return mode;
11450 }
11451
/* Clean up the drm core crtc state and free the containing intel_crtc. */
static void intel_crtc_destroy(struct drm_crtc *crtc)
{
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(intel_crtc);
}
11459
11460
11461
11462
11463
11464
11465
11466
11467
11468
11469
11470 static bool intel_wm_need_update(const struct intel_plane_state *cur,
11471 struct intel_plane_state *new)
11472 {
11473
11474 if (new->base.visible != cur->base.visible)
11475 return true;
11476
11477 if (!cur->base.fb || !new->base.fb)
11478 return false;
11479
11480 if (cur->base.fb->modifier != new->base.fb->modifier ||
11481 cur->base.rotation != new->base.rotation ||
11482 drm_rect_width(&new->base.src) != drm_rect_width(&cur->base.src) ||
11483 drm_rect_height(&new->base.src) != drm_rect_height(&cur->base.src) ||
11484 drm_rect_width(&new->base.dst) != drm_rect_width(&cur->base.dst) ||
11485 drm_rect_height(&new->base.dst) != drm_rect_height(&cur->base.dst))
11486 return true;
11487
11488 return false;
11489 }
11490
11491 static bool needs_scaling(const struct intel_plane_state *state)
11492 {
11493 int src_w = drm_rect_width(&state->base.src) >> 16;
11494 int src_h = drm_rect_height(&state->base.src) >> 16;
11495 int dst_w = drm_rect_width(&state->base.dst);
11496 int dst_h = drm_rect_height(&state->base.dst);
11497
11498 return (src_w != dst_w || src_h != dst_h);
11499 }
11500
/*
 * intel_plane_atomic_calc_changes - derive crtc-level update flags from a
 * plane state transition
 * @old_crtc_state: crtc state before the update
 * @crtc_state: new crtc state (flags are set on this)
 * @old_plane_state: plane state before the update
 * @plane_state: new plane state
 *
 * Computes whether the plane is turning on/off or merely changing, and sets
 * the corresponding crtc_state bookkeeping (watermark update flags, cxsr
 * and LP watermark disables, fb/frontbuffer bits, active plane masks).
 * Returns 0 on success or a negative error code from scaler setup.
 */
int intel_plane_atomic_calc_changes(const struct intel_crtc_state *old_crtc_state,
				    struct intel_crtc_state *crtc_state,
				    const struct intel_plane_state *old_plane_state,
				    struct intel_plane_state *plane_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	bool mode_changed = needs_modeset(crtc_state);
	bool was_crtc_enabled = old_crtc_state->base.active;
	bool is_crtc_enabled = crtc_state->base.active;
	bool turn_off, turn_on, visible, was_visible;
	struct drm_framebuffer *fb = plane_state->base.fb;
	int ret;

	/* gen9+ non-cursor planes may need a pipe scaler assigned */
	if (INTEL_GEN(dev_priv) >= 9 && plane->id != PLANE_CURSOR) {
		ret = skl_update_scaler_plane(crtc_state, plane_state);
		if (ret)
			return ret;
	}

	was_visible = old_plane_state->base.visible;
	visible = plane_state->base.visible;

	/* a plane cannot have been visible on an inactive crtc */
	if (!was_crtc_enabled && WARN_ON(was_visible))
		was_visible = false;

	/*
	 * Visibility is computed as if the crtc were active; force it (and
	 * the plane's entry in active_planes/data_rate) off when the crtc
	 * is being disabled, so everything downstream sees a consistent
	 * "everything off" state.
	 */
	if (!is_crtc_enabled) {
		plane_state->base.visible = visible = false;
		crtc_state->active_planes &= ~BIT(plane->id);
		crtc_state->data_rate[plane->id] = 0;
	}

	/* invisible before and after: nothing to do for this plane */
	if (!was_visible && !visible)
		return 0;

	if (fb != old_plane_state->base.fb)
		crtc_state->fb_changed = true;

	/* a full modeset counts as both turning off and back on */
	turn_off = was_visible && (!visible || mode_changed);
	turn_on = visible && (!was_visible || mode_changed);

	DRM_DEBUG_ATOMIC("[CRTC:%d:%s] has [PLANE:%d:%s] with fb %i\n",
			 crtc->base.base.id, crtc->base.name,
			 plane->base.base.id, plane->base.name,
			 fb ? fb->base.id : -1);

	DRM_DEBUG_ATOMIC("[PLANE:%d:%s] visible %i -> %i, off %i, on %i, ms %i\n",
			 plane->base.base.id, plane->base.name,
			 was_visible, visible,
			 turn_off, turn_on, mode_changed);

	if (turn_on) {
		/* pre-ilk (except g4x) recompute watermarks before enabling */
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
			crtc_state->update_wm_pre = true;

		/* must disable cxsr around non-cursor plane enabling */
		if (plane->id != PLANE_CURSOR)
			crtc_state->disable_cxsr = true;
	} else if (turn_off) {
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv))
			crtc_state->update_wm_post = true;

		/* must disable cxsr around non-cursor plane disabling */
		if (plane->id != PLANE_CURSOR)
			crtc_state->disable_cxsr = true;
	} else if (intel_wm_need_update(old_plane_state, plane_state)) {
		if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv)) {
			/* FIXME bollocks */
			crtc_state->update_wm_pre = true;
			crtc_state->update_wm_post = true;
		}
	}

	if (visible || was_visible)
		crtc_state->fb_bits |= plane->frontbuffer_bit;

	/*
	 * ILK/SNB/IVB: disable LP watermarks whenever a non-cursor plane is
	 * turned on, or starts needing scaling when it previously did not.
	 * NOTE(review): the condition below is the full extent of what this
	 * function guarantees; the underlying hardware rationale lives in
	 * the platform watermark code — confirm there before changing.
	 */
	if (plane->id != PLANE_CURSOR &&
	    (IS_GEN_RANGE(dev_priv, 5, 6) ||
	     IS_IVYBRIDGE(dev_priv)) &&
	    (turn_on || (!needs_scaling(old_plane_state) &&
			 needs_scaling(plane_state))))
		crtc_state->disable_lp_wm = true;

	return 0;
}
11630
11631 static bool encoders_cloneable(const struct intel_encoder *a,
11632 const struct intel_encoder *b)
11633 {
11634
11635 return a == b || (a->cloneable & (1 << b->type) &&
11636 b->cloneable & (1 << a->type));
11637 }
11638
11639 static bool check_single_encoder_cloning(struct drm_atomic_state *state,
11640 struct intel_crtc *crtc,
11641 struct intel_encoder *encoder)
11642 {
11643 struct intel_encoder *source_encoder;
11644 struct drm_connector *connector;
11645 struct drm_connector_state *connector_state;
11646 int i;
11647
11648 for_each_new_connector_in_state(state, connector, connector_state, i) {
11649 if (connector_state->crtc != &crtc->base)
11650 continue;
11651
11652 source_encoder =
11653 to_intel_encoder(connector_state->best_encoder);
11654 if (!encoders_cloneable(encoder, source_encoder))
11655 return false;
11656 }
11657
11658 return true;
11659 }
11660
/*
 * Pull the linked (Y) plane state of every planar plane in this update
 * into the atomic state, so master and slave planes are always committed
 * together.  Returns 0 or a negative error from state acquisition.
 */
static int icl_add_linked_planes(struct intel_atomic_state *state)
{
	struct intel_plane *plane, *linked;
	struct intel_plane_state *plane_state, *linked_plane_state;
	int i;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		linked = plane_state->linked_plane;

		if (!linked)
			continue;

		linked_plane_state = intel_atomic_get_plane_state(state, linked);
		if (IS_ERR(linked_plane_state))
			return PTR_ERR(linked_plane_state);

		/* links must be mutual, with exactly one slave per pair */
		WARN_ON(linked_plane_state->linked_plane != plane);
		WARN_ON(linked_plane_state->slave == plane_state->slave);
	}

	return 0;
}
11683
/*
 * On gen11+ every planar-YUV (NV12 class) plane needs a second plane to
 * fetch the Y surface.  Tear down all stale plane links on this crtc, then
 * assign a currently unused Y-capable plane to each planar plane in the
 * state.  Returns -EINVAL when not enough free Y planes exist.
 */
static int icl_check_nv12_planes(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_atomic_state *state = to_intel_atomic_state(crtc_state->base.state);
	struct intel_plane *plane, *linked;
	struct intel_plane_state *plane_state;
	int i;

	if (INTEL_GEN(dev_priv) < 11)
		return 0;

	/*
	 * Destroy all old plane links, and drop invisible slave planes from
	 * the active mask (scheduling them for an update to turn off).
	 */
	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		if (plane->pipe != crtc->pipe || !plane_state->linked_plane)
			continue;

		plane_state->linked_plane = NULL;
		if (plane_state->slave && !plane_state->base.visible) {
			crtc_state->active_planes &= ~BIT(plane->id);
			crtc_state->update_planes |= BIT(plane->id);
		}

		plane_state->slave = false;
	}

	/* no planar planes in this state: nothing more to link */
	if (!crtc_state->nv12_planes)
		return 0;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		struct intel_plane_state *linked_state = NULL;

		if (plane->pipe != crtc->pipe ||
		    !(crtc_state->nv12_planes & BIT(plane->id)))
			continue;

		/* find a Y-capable plane that is not already in use */
		for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, linked) {
			if (!icl_is_nv12_y_plane(linked->id))
				continue;

			if (crtc_state->active_planes & BIT(linked->id))
				continue;

			linked_state = intel_atomic_get_plane_state(state, linked);
			if (IS_ERR(linked_state))
				return PTR_ERR(linked_state);

			break;
		}

		if (!linked_state) {
			DRM_DEBUG_KMS("Need %d free Y planes for planar YUV\n",
				      hweight8(crtc_state->nv12_planes));

			return -EINVAL;
		}

		plane_state->linked_plane = linked;

		/* wire up the pair and mark the Y plane for programming */
		linked_state->slave = true;
		linked_state->linked_plane = plane;
		crtc_state->active_planes |= BIT(linked->id);
		crtc_state->update_planes |= BIT(linked->id);
		DRM_DEBUG_KMS("Using %s as Y plane for %s\n", linked->base.name, plane->base.name);
	}

	return 0;
}
11755
11756 static bool c8_planes_changed(const struct intel_crtc_state *new_crtc_state)
11757 {
11758 struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->base.crtc);
11759 struct intel_atomic_state *state =
11760 to_intel_atomic_state(new_crtc_state->base.state);
11761 const struct intel_crtc_state *old_crtc_state =
11762 intel_atomic_get_old_crtc_state(state, crtc);
11763
11764 return !old_crtc_state->c8_planes != !new_crtc_state->c8_planes;
11765 }
11766
/*
 * drm_crtc_helper .atomic_check hook: finish computing derived crtc state
 * (clocks, color management, watermarks, scalers, IPS) and validate it.
 * Returns 0 on success or a negative error code.
 */
static int intel_crtc_atomic_check(struct drm_crtc *crtc,
				   struct drm_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	struct intel_crtc_state *pipe_config =
		to_intel_crtc_state(crtc_state);
	int ret;
	bool mode_changed = needs_modeset(pipe_config);

	/* pre-ilk (except g4x): turning the crtc off needs a post-update wm pass */
	if (INTEL_GEN(dev_priv) < 5 && !IS_G4X(dev_priv) &&
	    mode_changed && !crtc_state->active)
		pipe_config->update_wm_post = true;

	/* compute PLL state for a full modeset; a dpll must not be set yet */
	if (mode_changed && crtc_state->enable &&
	    dev_priv->display.crtc_compute_clock &&
	    !WARN_ON(pipe_config->shared_dpll)) {
		ret = dev_priv->display.crtc_compute_clock(intel_crtc,
							   pipe_config);
		if (ret)
			return ret;
	}

	/*
	 * C8 planes appearing or disappearing forces the color management
	 * state to be recomputed.
	 */
	if (c8_planes_changed(pipe_config))
		crtc_state->color_mgmt_changed = true;

	if (mode_changed || pipe_config->update_pipe ||
	    crtc_state->color_mgmt_changed) {
		ret = intel_color_check(pipe_config);
		if (ret)
			return ret;
	}

	ret = 0;
	if (dev_priv->display.compute_pipe_wm) {
		ret = dev_priv->display.compute_pipe_wm(pipe_config);
		if (ret) {
			DRM_DEBUG_KMS("Target pipe watermarks are invalid\n");
			return ret;
		}
	}

	if (dev_priv->display.compute_intermediate_wm) {
		/* intermediate wm requires the optimal wm computed above */
		if (WARN_ON(!dev_priv->display.compute_pipe_wm))
			return 0;

		/*
		 * Failure here means no watermark configuration is valid
		 * for the transition between the old and new state.
		 */
		ret = dev_priv->display.compute_intermediate_wm(pipe_config);
		if (ret) {
			DRM_DEBUG_KMS("No valid intermediate pipe watermarks are possible\n");
			return ret;
		}
	}

	/* gen9+: scaler assignment, NV12 plane linking, pixel rate limits */
	if (INTEL_GEN(dev_priv) >= 9) {
		if (mode_changed || pipe_config->update_pipe)
			ret = skl_update_scaler_crtc(pipe_config);

		if (!ret)
			ret = icl_check_nv12_planes(pipe_config);
		if (!ret)
			ret = skl_check_pipe_max_pixel_rate(intel_crtc,
							    pipe_config);
		if (!ret)
			ret = intel_atomic_setup_scalers(dev_priv, intel_crtc,
							 pipe_config);
	}

	if (HAS_IPS(dev_priv))
		pipe_config->ips_enabled = hsw_compute_ips_config(pipe_config);

	return ret;
}
11848
/* CRTC helper vtable: only .atomic_check is routed through the helpers. */
static const struct drm_crtc_helper_funcs intel_helper_funcs = {
	.atomic_check = intel_crtc_atomic_check,
};
11852
/*
 * Resynchronize every connector's atomic state (best_encoder, crtc) with
 * the legacy encoder pointers, fixing up connector references so that a
 * connector bound to a crtc holds exactly one reference.
 */
static void intel_modeset_update_connector_atomic_state(struct drm_device *dev)
{
	struct intel_connector *connector;
	struct drm_connector_list_iter conn_iter;

	drm_connector_list_iter_begin(dev, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		/* drop the reference held for the previous crtc binding */
		if (connector->base.state->crtc)
			drm_connector_put(&connector->base);

		if (connector->base.encoder) {
			connector->base.state->best_encoder =
				connector->base.encoder;
			connector->base.state->crtc =
				connector->base.encoder->crtc;

			/* bound to a crtc again: take a fresh reference */
			drm_connector_get(&connector->base);
		} else {
			connector->base.state->best_encoder = NULL;
			connector->base.state->crtc = NULL;
		}
	}
	drm_connector_list_iter_end(&conn_iter);
}
11877
11878 static int
11879 compute_sink_pipe_bpp(const struct drm_connector_state *conn_state,
11880 struct intel_crtc_state *pipe_config)
11881 {
11882 struct drm_connector *connector = conn_state->connector;
11883 const struct drm_display_info *info = &connector->display_info;
11884 int bpp;
11885
11886 switch (conn_state->max_bpc) {
11887 case 6 ... 7:
11888 bpp = 6 * 3;
11889 break;
11890 case 8 ... 9:
11891 bpp = 8 * 3;
11892 break;
11893 case 10 ... 11:
11894 bpp = 10 * 3;
11895 break;
11896 case 12:
11897 bpp = 12 * 3;
11898 break;
11899 default:
11900 return -EINVAL;
11901 }
11902
11903 if (bpp < pipe_config->pipe_bpp) {
11904 DRM_DEBUG_KMS("[CONNECTOR:%d:%s] Limiting display bpp to %d instead of "
11905 "EDID bpp %d, requested bpp %d, max platform bpp %d\n",
11906 connector->base.id, connector->name,
11907 bpp, 3 * info->bpc, 3 * conn_state->max_requested_bpc,
11908 pipe_config->pipe_bpp);
11909
11910 pipe_config->pipe_bpp = bpp;
11911 }
11912
11913 return 0;
11914 }
11915
11916 static int
11917 compute_baseline_pipe_bpp(struct intel_crtc *crtc,
11918 struct intel_crtc_state *pipe_config)
11919 {
11920 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
11921 struct drm_atomic_state *state = pipe_config->base.state;
11922 struct drm_connector *connector;
11923 struct drm_connector_state *connector_state;
11924 int bpp, i;
11925
11926 if ((IS_G4X(dev_priv) || IS_VALLEYVIEW(dev_priv) ||
11927 IS_CHERRYVIEW(dev_priv)))
11928 bpp = 10*3;
11929 else if (INTEL_GEN(dev_priv) >= 5)
11930 bpp = 12*3;
11931 else
11932 bpp = 8*3;
11933
11934 pipe_config->pipe_bpp = bpp;
11935
11936
11937 for_each_new_connector_in_state(state, connector, connector_state, i) {
11938 int ret;
11939
11940 if (connector_state->crtc != &crtc->base)
11941 continue;
11942
11943 ret = compute_sink_pipe_bpp(connector_state, pipe_config);
11944 if (ret)
11945 return ret;
11946 }
11947
11948 return 0;
11949 }
11950
/* Dump the hardware (crtc_*) timing fields of @mode to the KMS debug log. */
static void intel_dump_crtc_timings(const struct drm_display_mode *mode)
{
	DRM_DEBUG_KMS("crtc timings: %d %d %d %d %d %d %d %d %d, "
		      "type: 0x%x flags: 0x%x\n",
		      mode->crtc_clock,
		      mode->crtc_hdisplay, mode->crtc_hsync_start,
		      mode->crtc_hsync_end, mode->crtc_htotal,
		      mode->crtc_vdisplay, mode->crtc_vsync_start,
		      mode->crtc_vsync_end, mode->crtc_vtotal,
		      mode->type, mode->flags);
}
11962
/* Dump one set of link M/N values (labelled by @id, e.g. "fdi" or "dp m_n"). */
static inline void
intel_dump_m_n_config(const struct intel_crtc_state *pipe_config,
		      const char *id, unsigned int lane_count,
		      const struct intel_link_m_n *m_n)
{
	DRM_DEBUG_KMS("%s: lanes: %i; gmch_m: %u, gmch_n: %u, link_m: %u, link_n: %u, tu: %u\n",
		      id, lane_count,
		      m_n->gmch_m, m_n->gmch_n,
		      m_n->link_m, m_n->link_n, m_n->tu);
}
11973
11974 static void
11975 intel_dump_infoframe(struct drm_i915_private *dev_priv,
11976 const union hdmi_infoframe *frame)
11977 {
11978 if ((drm_debug & DRM_UT_KMS) == 0)
11979 return;
11980
11981 hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, frame);
11982 }
11983
/* Map INTEL_OUTPUT_* enum values to printable names for debug output. */
#define OUTPUT_TYPE(x) [INTEL_OUTPUT_ ## x] = #x

static const char * const output_type_str[] = {
	OUTPUT_TYPE(UNUSED),
	OUTPUT_TYPE(ANALOG),
	OUTPUT_TYPE(DVO),
	OUTPUT_TYPE(SDVO),
	OUTPUT_TYPE(LVDS),
	OUTPUT_TYPE(TVOUT),
	OUTPUT_TYPE(HDMI),
	OUTPUT_TYPE(DP),
	OUTPUT_TYPE(EDP),
	OUTPUT_TYPE(DSI),
	OUTPUT_TYPE(DDI),
	OUTPUT_TYPE(DP_MST),
};

#undef OUTPUT_TYPE
12002
/*
 * Format the set bits of @output_types as a comma-separated list of type
 * names into @buf (at most @len bytes, always NUL-terminated).  Stops when
 * the buffer fills; warns once if any bit has no name in output_type_str.
 */
static void snprintf_output_types(char *buf, size_t len,
				  unsigned int output_types)
{
	char *str = buf;
	int i;

	str[0] = '\0';

	for (i = 0; i < ARRAY_SIZE(output_type_str); i++) {
		int r;

		if ((output_types & BIT(i)) == 0)
			continue;

		r = snprintf(str, len, "%s%s",
			     str != buf ? "," : "", output_type_str[i]);
		/* snprintf returns the would-be length; >= len means truncated */
		if (r >= len)
			break;
		str += r;
		len -= r;

		/* clear each bit we managed to print */
		output_types &= ~BIT(i);
	}

	/* leftover bits: unknown types, or output truncated above */
	WARN_ON_ONCE(output_types != 0);
}
12029
/* Printable names for enum intel_output_format, indexed by the enum value. */
static const char * const output_format_str[] = {
	[INTEL_OUTPUT_FORMAT_INVALID] = "Invalid",
	[INTEL_OUTPUT_FORMAT_RGB] = "RGB",
	[INTEL_OUTPUT_FORMAT_YCBCR420] = "YCBCR4:2:0",
	[INTEL_OUTPUT_FORMAT_YCBCR444] = "YCBCR4:4:4",
};
12036
12037 static const char *output_formats(enum intel_output_format format)
12038 {
12039 if (format >= ARRAY_SIZE(output_format_str))
12040 format = INTEL_OUTPUT_FORMAT_INVALID;
12041 return output_format_str[format];
12042 }
12043
/* Dump one plane's state (fb, format, visibility, rects) to the debug log. */
static void intel_dump_plane_state(const struct intel_plane_state *plane_state)
{
	struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
	const struct drm_framebuffer *fb = plane_state->base.fb;
	struct drm_format_name_buf format_name;

	/* a plane without an fb has nothing more to report */
	if (!fb) {
		DRM_DEBUG_KMS("[PLANE:%d:%s] fb: [NOFB], visible: %s\n",
			      plane->base.base.id, plane->base.name,
			      yesno(plane_state->base.visible));
		return;
	}

	DRM_DEBUG_KMS("[PLANE:%d:%s] fb: [FB:%d] %ux%u format = %s, visible: %s\n",
		      plane->base.base.id, plane->base.name,
		      fb->base.id, fb->width, fb->height,
		      drm_get_format_name(fb->format->format, &format_name),
		      yesno(plane_state->base.visible));
	DRM_DEBUG_KMS("\trotation: 0x%x, scaler: %d\n",
		      plane_state->base.rotation, plane_state->scaler_id);
	/* src/dst rectangles are only meaningful for a visible plane */
	if (plane_state->base.visible)
		DRM_DEBUG_KMS("\tsrc: " DRM_RECT_FP_FMT " dst: " DRM_RECT_FMT "\n",
			      DRM_RECT_FP_ARG(&plane_state->base.src),
			      DRM_RECT_ARG(&plane_state->base.dst));
}
12069
/*
 * Dump an entire crtc state to the KMS debug log: outputs, bpp, link M/N
 * values, infoframes, modes, scalers, panel fitter, dpll state and (when
 * @state is given) the state of every plane on the crtc.  @context is a
 * free-form label identifying the caller.
 */
static void intel_dump_pipe_config(const struct intel_crtc_state *pipe_config,
				   struct intel_atomic_state *state,
				   const char *context)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	const struct intel_plane_state *plane_state;
	struct intel_plane *plane;
	char buf[64];
	int i;

	DRM_DEBUG_KMS("[CRTC:%d:%s] enable: %s %s\n",
		      crtc->base.base.id, crtc->base.name,
		      yesno(pipe_config->base.enable), context);

	/* a disabled crtc has no config worth dumping, only plane state */
	if (!pipe_config->base.enable)
		goto dump_planes;

	snprintf_output_types(buf, sizeof(buf), pipe_config->output_types);
	DRM_DEBUG_KMS("active: %s, output_types: %s (0x%x), output format: %s\n",
		      yesno(pipe_config->base.active),
		      buf, pipe_config->output_types,
		      output_formats(pipe_config->output_format));

	DRM_DEBUG_KMS("cpu_transcoder: %s, pipe bpp: %i, dithering: %i\n",
		      transcoder_name(pipe_config->cpu_transcoder),
		      pipe_config->pipe_bpp, pipe_config->dither);

	if (pipe_config->has_pch_encoder)
		intel_dump_m_n_config(pipe_config, "fdi",
				      pipe_config->fdi_lanes,
				      &pipe_config->fdi_m_n);

	if (intel_crtc_has_dp_encoder(pipe_config)) {
		intel_dump_m_n_config(pipe_config, "dp m_n",
				pipe_config->lane_count, &pipe_config->dp_m_n);
		/* the second M/N set is only valid with DRRS */
		if (pipe_config->has_drrs)
			intel_dump_m_n_config(pipe_config, "dp m2_n2",
					      pipe_config->lane_count,
					      &pipe_config->dp_m2_n2);
	}

	DRM_DEBUG_KMS("audio: %i, infoframes: %i, infoframes enabled: 0x%x\n",
		      pipe_config->has_audio, pipe_config->has_infoframe,
		      pipe_config->infoframes.enable);

	/* dump each infoframe type that is enabled in this state */
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GENERAL_CONTROL))
		DRM_DEBUG_KMS("GCP: 0x%x\n", pipe_config->infoframes.gcp);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_AVI))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.avi);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_SPD))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.spd);
	if (pipe_config->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_INFOFRAME_TYPE_VENDOR))
		intel_dump_infoframe(dev_priv, &pipe_config->infoframes.hdmi);

	DRM_DEBUG_KMS("requested mode:\n");
	drm_mode_debug_printmodeline(&pipe_config->base.mode);
	DRM_DEBUG_KMS("adjusted mode:\n");
	drm_mode_debug_printmodeline(&pipe_config->base.adjusted_mode);
	intel_dump_crtc_timings(&pipe_config->base.adjusted_mode);
	DRM_DEBUG_KMS("port clock: %d, pipe src size: %dx%d, pixel rate %d\n",
		      pipe_config->port_clock,
		      pipe_config->pipe_src_w, pipe_config->pipe_src_h,
		      pipe_config->pixel_rate);

	if (INTEL_GEN(dev_priv) >= 9)
		DRM_DEBUG_KMS("num_scalers: %d, scaler_users: 0x%x, scaler_id: %d\n",
			      crtc->num_scalers,
			      pipe_config->scaler_state.scaler_users,
			      pipe_config->scaler_state.scaler_id);

	/* panel fitter layout differs between GMCH and PCH platforms */
	if (HAS_GMCH(dev_priv))
		DRM_DEBUG_KMS("gmch pfit: control: 0x%08x, ratios: 0x%08x, lvds border: 0x%08x\n",
			      pipe_config->gmch_pfit.control,
			      pipe_config->gmch_pfit.pgm_ratios,
			      pipe_config->gmch_pfit.lvds_border_bits);
	else
		DRM_DEBUG_KMS("pch pfit: pos: 0x%08x, size: 0x%08x, %s, force thru: %s\n",
			      pipe_config->pch_pfit.pos,
			      pipe_config->pch_pfit.size,
			      enableddisabled(pipe_config->pch_pfit.enabled),
			      yesno(pipe_config->pch_pfit.force_thru));

	DRM_DEBUG_KMS("ips: %i, double wide: %i\n",
		      pipe_config->ips_enabled, pipe_config->double_wide);

	intel_dpll_dump_hw_state(dev_priv, &pipe_config->dpll_hw_state);

dump_planes:
	if (!state)
		return;

	for_each_new_intel_plane_in_state(state, plane, plane_state, i) {
		if (plane->pipe == crtc->pipe)
			intel_dump_plane_state(plane_state);
	}
}
12171
/*
 * Check that no two SST streams share a digital port, and that SST and
 * MST are never mixed on the same port.  Returns true when the
 * configuration is free of conflicts.
 */
static bool check_digital_port_conflicts(struct intel_atomic_state *state)
{
	struct drm_device *dev = state->base.dev;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	unsigned int used_ports = 0;
	unsigned int used_mst_ports = 0;
	bool ret = true;

	/*
	 * Walk the full connector list (not just the atomic state) so that
	 * connectors untouched by this commit are considered as well.
	 * NOTE(review): assumes the caller holds the locks keeping the
	 * list and the ->state pointers stable — confirm at call sites.
	 */
	drm_connector_list_iter_begin(dev, &conn_iter);
	drm_for_each_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *connector_state;
		struct intel_encoder *encoder;

		/* prefer the new state when this connector is in the commit */
		connector_state =
			drm_atomic_get_new_connector_state(&state->base,
							   connector);
		if (!connector_state)
			connector_state = connector->state;

		if (!connector_state->best_encoder)
			continue;

		encoder = to_intel_encoder(connector_state->best_encoder);

		WARN_ON(!connector_state->crtc);

		switch (encoder->type) {
			unsigned int port_mask;
		case INTEL_OUTPUT_DDI:
			if (WARN_ON(!HAS_DDI(to_i915(dev))))
				break;
			/* else: fall through */
		case INTEL_OUTPUT_DP:
		case INTEL_OUTPUT_HDMI:
		case INTEL_OUTPUT_EDP:
			port_mask = 1 << encoder->port;

			/* the same port cannot carry two SST streams */
			if (used_ports & port_mask)
				ret = false;

			used_ports |= port_mask;
			break;
		case INTEL_OUTPUT_DP_MST:
			used_mst_ports |=
				1 << encoder->port;
			break;
		default:
			break;
		}
	}
	drm_connector_list_iter_end(&conn_iter);

	/* can't mix MST and SST/HDMI on the same digital port */
	if (used_ports & used_mst_ports)
		return false;

	return ret;
}
12237
/*
 * Reset the i915-specific part of a crtc_state to zero ahead of a full
 * recompute, preserving only the fields that must survive: scaler state,
 * dpll selection/hw state, ICL port dplls, CRC enablement and (on
 * g4x/vlv/chv) the watermark state.  Returns 0 or -ENOMEM.
 */
static int
clear_intel_crtc_state(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv =
		to_i915(crtc_state->base.crtc->dev);
	struct intel_crtc_state *saved_state;

	saved_state = kzalloc(sizeof(*saved_state), GFP_KERNEL);
	if (!saved_state)
		return -ENOMEM;

	/*
	 * Copy the fields to keep into the zeroed scratch state, then copy
	 * everything after ->base back over crtc_state: the net effect is
	 * "clear all except these".
	 */
	saved_state->scaler_state = crtc_state->scaler_state;
	saved_state->shared_dpll = crtc_state->shared_dpll;
	saved_state->dpll_hw_state = crtc_state->dpll_hw_state;
	memcpy(saved_state->icl_port_dplls, crtc_state->icl_port_dplls,
	       sizeof(saved_state->icl_port_dplls));
	saved_state->crc_enabled = crtc_state->crc_enabled;
	if (IS_G4X(dev_priv) ||
	    IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		saved_state->wm = crtc_state->wm;

	/* ->base must be the first member for the pointer math below */
	BUILD_BUG_ON(offsetof(struct intel_crtc_state, base));
	memcpy(&crtc_state->base + 1, &saved_state->base + 1,
	       sizeof(*crtc_state) - sizeof(crtc_state->base));

	kfree(saved_state);
	return 0;
}
12272
static int
intel_modeset_pipe_config(struct intel_crtc_state *pipe_config)
{
	struct drm_crtc *crtc = pipe_config->base.crtc;
	struct drm_atomic_state *state = pipe_config->base.state;
	struct intel_encoder *encoder;
	struct drm_connector *connector;
	struct drm_connector_state *connector_state;
	int base_bpp, ret;
	int i;
	bool retry = true;

	ret = clear_intel_crtc_state(pipe_config);
	if (ret)
		return ret;

	pipe_config->cpu_transcoder =
		(enum transcoder) to_intel_crtc(crtc)->pipe;

	/*
	 * Sanitize sync polarity flags: if neither positive nor negative
	 * polarity was requested for hsync/vsync, default to negative.
	 */
	if (!(pipe_config->base.adjusted_mode.flags &
	      (DRM_MODE_FLAG_PHSYNC | DRM_MODE_FLAG_NHSYNC)))
		pipe_config->base.adjusted_mode.flags |= DRM_MODE_FLAG_NHSYNC;

	if (!(pipe_config->base.adjusted_mode.flags &
	      (DRM_MODE_FLAG_PVSYNC | DRM_MODE_FLAG_NVSYNC)))
		pipe_config->base.adjusted_mode.flags |= DRM_MODE_FLAG_NVSYNC;

	ret = compute_baseline_pipe_bpp(to_intel_crtc(crtc),
					pipe_config);
	if (ret)
		return ret;

	base_bpp = pipe_config->pipe_bpp;

	/*
	 * Derive the pipe source size from the *requested* mode rather
	 * than the adjusted mode, since encoders may modify the adjusted
	 * mode in the retry loop below (e.g. stereo modes change the
	 * effective timings).
	 */
	drm_mode_get_hv_timing(&pipe_config->base.mode,
			       &pipe_config->pipe_src_w,
			       &pipe_config->pipe_src_h);

	for_each_new_connector_in_state(state, connector, connector_state, i) {
		if (connector_state->crtc != crtc)
			continue;

		encoder = to_intel_encoder(connector_state->best_encoder);

		if (!check_single_encoder_cloning(state, to_intel_crtc(crtc), encoder)) {
			DRM_DEBUG_KMS("rejecting invalid cloning configuration\n");
			return -EINVAL;
		}

		/*
		 * Determine output_types before calling the
		 * .compute_config() hooks, which want to know the actual
		 * output type in use.
		 */
		if (encoder->compute_output_type)
			pipe_config->output_types |=
				BIT(encoder->compute_output_type(encoder, pipe_config,
								 connector_state));
		else
			pipe_config->output_types |= BIT(encoder->type);
	}

encoder_retry:
	/* Reset to defaults so a retry starts from a clean slate. */
	pipe_config->port_clock = 0;
	pipe_config->pixel_multiplier = 1;

	/* Fill in default crtc timings; encoders may overwrite them. */
	drm_mode_set_crtcinfo(&pipe_config->base.adjusted_mode,
			      CRTC_STEREO_DOUBLE);

	/*
	 * Let each encoder adjust the mode/config according to its own
	 * limitations (link rate, bpp, port clock, ...).
	 */
	for_each_new_connector_in_state(state, connector, connector_state, i) {
		if (connector_state->crtc != crtc)
			continue;

		encoder = to_intel_encoder(connector_state->best_encoder);
		ret = encoder->compute_config(encoder, pipe_config,
					      connector_state);
		if (ret < 0) {
			if (ret != -EDEADLK)
				DRM_DEBUG_KMS("Encoder config failure: %d\n",
					      ret);
			return ret;
		}
	}

	/*
	 * Default the port clock if no encoder set one; done after the
	 * encoder pass since the adjusted mode may have changed.
	 */
	if (!pipe_config->port_clock)
		pipe_config->port_clock = pipe_config->base.adjusted_mode.crtc_clock
			* pipe_config->pixel_multiplier;

	ret = intel_crtc_compute_config(to_intel_crtc(crtc), pipe_config);
	if (ret == -EDEADLK)
		return ret;
	if (ret < 0) {
		DRM_DEBUG_KMS("CRTC fixup failed\n");
		return ret;
	}

	if (ret == RETRY) {
		/* Exactly one retry is allowed; a second means a loop. */
		if (WARN(!retry, "loop in pipe configuration computation\n"))
			return -EINVAL;

		DRM_DEBUG_KMS("CRTC bw constrained, retrying\n");
		retry = false;
		goto encoder_retry;
	}

	/*
	 * Enable dithering when the pipe ended up at 6 bpc (18 bpp),
	 * unless it was explicitly force-disabled.
	 */
	pipe_config->dither = (pipe_config->pipe_bpp == 6*3) &&
		!pipe_config->dither_force_disable;
	DRM_DEBUG_KMS("hw max bpp: %i, pipe bpp: %i, dithering: %i\n",
		      base_bpp, pipe_config->pipe_bpp, pipe_config->dither);

	return 0;
}
12409
/*
 * Fuzzy clock comparison: two clocks match when they are identical,
 * or when both are non-zero and their relative difference is within
 * roughly 5% ((diff + sum) * 100 / sum < 105).
 */
bool intel_fuzzy_clock_check(int clock1, int clock2)
{
	int delta;

	if (clock1 == clock2)
		return true;

	/* A zero clock never fuzzily matches a non-zero one. */
	if (clock1 == 0 || clock2 == 0)
		return false;

	delta = abs(clock1 - clock2);

	return (((delta + clock1 + clock2) * 100) / (clock1 + clock2)) < 105;
}
12427
12428 static bool
12429 intel_compare_m_n(unsigned int m, unsigned int n,
12430 unsigned int m2, unsigned int n2,
12431 bool exact)
12432 {
12433 if (m == m2 && n == n2)
12434 return true;
12435
12436 if (exact || !m || !n || !m2 || !n2)
12437 return false;
12438
12439 BUILD_BUG_ON(DATA_LINK_M_N_MASK > INT_MAX);
12440
12441 if (n > n2) {
12442 while (n > n2) {
12443 m2 <<= 1;
12444 n2 <<= 1;
12445 }
12446 } else if (n < n2) {
12447 while (n < n2) {
12448 m <<= 1;
12449 n <<= 1;
12450 }
12451 }
12452
12453 if (n != n2)
12454 return false;
12455
12456 return intel_fuzzy_clock_check(m, m2);
12457 }
12458
12459 static bool
12460 intel_compare_link_m_n(const struct intel_link_m_n *m_n,
12461 const struct intel_link_m_n *m2_n2,
12462 bool exact)
12463 {
12464 return m_n->tu == m2_n2->tu &&
12465 intel_compare_m_n(m_n->gmch_m, m_n->gmch_n,
12466 m2_n2->gmch_m, m2_n2->gmch_n, exact) &&
12467 intel_compare_m_n(m_n->link_m, m_n->link_n,
12468 m2_n2->link_m, m2_n2->link_n, exact);
12469 }
12470
12471 static bool
12472 intel_compare_infoframe(const union hdmi_infoframe *a,
12473 const union hdmi_infoframe *b)
12474 {
12475 return memcmp(a, b, sizeof(*a)) == 0;
12476 }
12477
/*
 * Report a mismatch between the expected (@a) and found (@b) contents
 * of the @name infoframe.  During a fastset this is only logged at
 * KMS debug level (and skipped entirely unless KMS debugging is
 * enabled); otherwise it is an error.
 */
static void
pipe_config_infoframe_mismatch(struct drm_i915_private *dev_priv,
			       bool fastset, const char *name,
			       const union hdmi_infoframe *a,
			       const union hdmi_infoframe *b)
{
	if (fastset) {
		/* Avoid the (relatively costly) infoframe dump when
		 * nobody would see it anyway. */
		if ((drm_debug & DRM_UT_KMS) == 0)
			return;

		drm_dbg(DRM_UT_KMS, "fastset mismatch in %s infoframe", name);
		drm_dbg(DRM_UT_KMS, "expected:");
		hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, a);
		drm_dbg(DRM_UT_KMS, "found");
		hdmi_infoframe_log(KERN_DEBUG, dev_priv->drm.dev, b);
	} else {
		drm_err("mismatch in %s infoframe", name);
		drm_err("expected:");
		hdmi_infoframe_log(KERN_ERR, dev_priv->drm.dev, a);
		drm_err("found");
		hdmi_infoframe_log(KERN_ERR, dev_priv->drm.dev, b);
	}
}
12501
/*
 * Log a pipe config mismatch for field @name with a printf-style
 * detail message.  Fastset mismatches are debug-only; full modeset
 * mismatches are errors.
 */
static void __printf(3, 4)
pipe_config_mismatch(bool fastset, const char *name, const char *format, ...)
{
	struct va_format vaf;
	va_list args;

	va_start(args, format);
	vaf.fmt = format;
	vaf.va = &args;

	/* %pV expands the captured format+args in one printk call. */
	if (fastset)
		drm_dbg(DRM_UT_KMS, "fastset mismatch in %s %pV", name, &vaf);
	else
		drm_err("mismatch in %s %pV", name, &vaf);

	va_end(args);
}
12519
12520 static bool fastboot_enabled(struct drm_i915_private *dev_priv)
12521 {
12522 if (i915_modparams.fastboot != -1)
12523 return i915_modparams.fastboot;
12524
12525
12526 if (INTEL_GEN(dev_priv) >= 9)
12527 return true;
12528
12529
12530 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
12531 return true;
12532
12533
12534 return false;
12535 }
12536
12537 static bool
12538 intel_pipe_config_compare(const struct intel_crtc_state *current_config,
12539 const struct intel_crtc_state *pipe_config,
12540 bool fastset)
12541 {
12542 struct drm_i915_private *dev_priv = to_i915(current_config->base.crtc->dev);
12543 bool ret = true;
12544 bool fixup_inherited = fastset &&
12545 (current_config->base.mode.private_flags & I915_MODE_FLAG_INHERITED) &&
12546 !(pipe_config->base.mode.private_flags & I915_MODE_FLAG_INHERITED);
12547
12548 if (fixup_inherited && !fastboot_enabled(dev_priv)) {
12549 DRM_DEBUG_KMS("initial modeset and fastboot not set\n");
12550 ret = false;
12551 }
12552
12553 #define PIPE_CONF_CHECK_X(name) do { \
12554 if (current_config->name != pipe_config->name) { \
12555 pipe_config_mismatch(fastset, __stringify(name), \
12556 "(expected 0x%08x, found 0x%08x)\n", \
12557 current_config->name, \
12558 pipe_config->name); \
12559 ret = false; \
12560 } \
12561 } while (0)
12562
12563 #define PIPE_CONF_CHECK_I(name) do { \
12564 if (current_config->name != pipe_config->name) { \
12565 pipe_config_mismatch(fastset, __stringify(name), \
12566 "(expected %i, found %i)\n", \
12567 current_config->name, \
12568 pipe_config->name); \
12569 ret = false; \
12570 } \
12571 } while (0)
12572
12573 #define PIPE_CONF_CHECK_BOOL(name) do { \
12574 if (current_config->name != pipe_config->name) { \
12575 pipe_config_mismatch(fastset, __stringify(name), \
12576 "(expected %s, found %s)\n", \
12577 yesno(current_config->name), \
12578 yesno(pipe_config->name)); \
12579 ret = false; \
12580 } \
12581 } while (0)
12582
12583
12584
12585
12586
12587
12588 #define PIPE_CONF_CHECK_BOOL_INCOMPLETE(name) do { \
12589 if (!fixup_inherited || (!current_config->name && !pipe_config->name)) { \
12590 PIPE_CONF_CHECK_BOOL(name); \
12591 } else { \
12592 pipe_config_mismatch(fastset, __stringify(name), \
12593 "unable to verify whether state matches exactly, forcing modeset (expected %s, found %s)\n", \
12594 yesno(current_config->name), \
12595 yesno(pipe_config->name)); \
12596 ret = false; \
12597 } \
12598 } while (0)
12599
12600 #define PIPE_CONF_CHECK_P(name) do { \
12601 if (current_config->name != pipe_config->name) { \
12602 pipe_config_mismatch(fastset, __stringify(name), \
12603 "(expected %p, found %p)\n", \
12604 current_config->name, \
12605 pipe_config->name); \
12606 ret = false; \
12607 } \
12608 } while (0)
12609
12610 #define PIPE_CONF_CHECK_M_N(name) do { \
12611 if (!intel_compare_link_m_n(¤t_config->name, \
12612 &pipe_config->name,\
12613 !fastset)) { \
12614 pipe_config_mismatch(fastset, __stringify(name), \
12615 "(expected tu %i gmch %i/%i link %i/%i, " \
12616 "found tu %i, gmch %i/%i link %i/%i)\n", \
12617 current_config->name.tu, \
12618 current_config->name.gmch_m, \
12619 current_config->name.gmch_n, \
12620 current_config->name.link_m, \
12621 current_config->name.link_n, \
12622 pipe_config->name.tu, \
12623 pipe_config->name.gmch_m, \
12624 pipe_config->name.gmch_n, \
12625 pipe_config->name.link_m, \
12626 pipe_config->name.link_n); \
12627 ret = false; \
12628 } \
12629 } while (0)
12630
12631
12632
12633
12634
12635
12636 #define PIPE_CONF_CHECK_M_N_ALT(name, alt_name) do { \
12637 if (!intel_compare_link_m_n(¤t_config->name, \
12638 &pipe_config->name, !fastset) && \
12639 !intel_compare_link_m_n(¤t_config->alt_name, \
12640 &pipe_config->name, !fastset)) { \
12641 pipe_config_mismatch(fastset, __stringify(name), \
12642 "(expected tu %i gmch %i/%i link %i/%i, " \
12643 "or tu %i gmch %i/%i link %i/%i, " \
12644 "found tu %i, gmch %i/%i link %i/%i)\n", \
12645 current_config->name.tu, \
12646 current_config->name.gmch_m, \
12647 current_config->name.gmch_n, \
12648 current_config->name.link_m, \
12649 current_config->name.link_n, \
12650 current_config->alt_name.tu, \
12651 current_config->alt_name.gmch_m, \
12652 current_config->alt_name.gmch_n, \
12653 current_config->alt_name.link_m, \
12654 current_config->alt_name.link_n, \
12655 pipe_config->name.tu, \
12656 pipe_config->name.gmch_m, \
12657 pipe_config->name.gmch_n, \
12658 pipe_config->name.link_m, \
12659 pipe_config->name.link_n); \
12660 ret = false; \
12661 } \
12662 } while (0)
12663
12664 #define PIPE_CONF_CHECK_FLAGS(name, mask) do { \
12665 if ((current_config->name ^ pipe_config->name) & (mask)) { \
12666 pipe_config_mismatch(fastset, __stringify(name), \
12667 "(%x) (expected %i, found %i)\n", \
12668 (mask), \
12669 current_config->name & (mask), \
12670 pipe_config->name & (mask)); \
12671 ret = false; \
12672 } \
12673 } while (0)
12674
12675 #define PIPE_CONF_CHECK_CLOCK_FUZZY(name) do { \
12676 if (!intel_fuzzy_clock_check(current_config->name, pipe_config->name)) { \
12677 pipe_config_mismatch(fastset, __stringify(name), \
12678 "(expected %i, found %i)\n", \
12679 current_config->name, \
12680 pipe_config->name); \
12681 ret = false; \
12682 } \
12683 } while (0)
12684
12685 #define PIPE_CONF_CHECK_INFOFRAME(name) do { \
12686 if (!intel_compare_infoframe(¤t_config->infoframes.name, \
12687 &pipe_config->infoframes.name)) { \
12688 pipe_config_infoframe_mismatch(dev_priv, fastset, __stringify(name), \
12689 ¤t_config->infoframes.name, \
12690 &pipe_config->infoframes.name); \
12691 ret = false; \
12692 } \
12693 } while (0)
12694
12695 #define PIPE_CONF_QUIRK(quirk) \
12696 ((current_config->quirks | pipe_config->quirks) & (quirk))
12697
12698 PIPE_CONF_CHECK_I(cpu_transcoder);
12699
12700 PIPE_CONF_CHECK_BOOL(has_pch_encoder);
12701 PIPE_CONF_CHECK_I(fdi_lanes);
12702 PIPE_CONF_CHECK_M_N(fdi_m_n);
12703
12704 PIPE_CONF_CHECK_I(lane_count);
12705 PIPE_CONF_CHECK_X(lane_lat_optim_mask);
12706
12707 if (INTEL_GEN(dev_priv) < 8) {
12708 PIPE_CONF_CHECK_M_N(dp_m_n);
12709
12710 if (current_config->has_drrs)
12711 PIPE_CONF_CHECK_M_N(dp_m2_n2);
12712 } else
12713 PIPE_CONF_CHECK_M_N_ALT(dp_m_n, dp_m2_n2);
12714
12715 PIPE_CONF_CHECK_X(output_types);
12716
12717 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_hdisplay);
12718 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_htotal);
12719 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_hblank_start);
12720 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_hblank_end);
12721 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_hsync_start);
12722 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_hsync_end);
12723
12724 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vdisplay);
12725 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vtotal);
12726 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vblank_start);
12727 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vblank_end);
12728 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vsync_start);
12729 PIPE_CONF_CHECK_I(base.adjusted_mode.crtc_vsync_end);
12730
12731 PIPE_CONF_CHECK_I(pixel_multiplier);
12732 PIPE_CONF_CHECK_I(output_format);
12733 PIPE_CONF_CHECK_BOOL(has_hdmi_sink);
12734 if ((INTEL_GEN(dev_priv) < 8 && !IS_HASWELL(dev_priv)) ||
12735 IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
12736 PIPE_CONF_CHECK_BOOL(limited_color_range);
12737
12738 PIPE_CONF_CHECK_BOOL(hdmi_scrambling);
12739 PIPE_CONF_CHECK_BOOL(hdmi_high_tmds_clock_ratio);
12740 PIPE_CONF_CHECK_BOOL(has_infoframe);
12741
12742 PIPE_CONF_CHECK_BOOL_INCOMPLETE(has_audio);
12743
12744 PIPE_CONF_CHECK_FLAGS(base.adjusted_mode.flags,
12745 DRM_MODE_FLAG_INTERLACE);
12746
12747 if (!PIPE_CONF_QUIRK(PIPE_CONFIG_QUIRK_MODE_SYNC_FLAGS)) {
12748 PIPE_CONF_CHECK_FLAGS(base.adjusted_mode.flags,
12749 DRM_MODE_FLAG_PHSYNC);
12750 PIPE_CONF_CHECK_FLAGS(base.adjusted_mode.flags,
12751 DRM_MODE_FLAG_NHSYNC);
12752 PIPE_CONF_CHECK_FLAGS(base.adjusted_mode.flags,
12753 DRM_MODE_FLAG_PVSYNC);
12754 PIPE_CONF_CHECK_FLAGS(base.adjusted_mode.flags,
12755 DRM_MODE_FLAG_NVSYNC);
12756 }
12757
12758 PIPE_CONF_CHECK_X(gmch_pfit.control);
12759
12760 if (INTEL_GEN(dev_priv) < 4)
12761 PIPE_CONF_CHECK_X(gmch_pfit.pgm_ratios);
12762 PIPE_CONF_CHECK_X(gmch_pfit.lvds_border_bits);
12763
12764
12765
12766
12767
12768 PIPE_CONF_CHECK_BOOL(pch_pfit.force_thru);
12769
12770 if (!fastset) {
12771 PIPE_CONF_CHECK_I(pipe_src_w);
12772 PIPE_CONF_CHECK_I(pipe_src_h);
12773
12774 PIPE_CONF_CHECK_BOOL(pch_pfit.enabled);
12775 if (current_config->pch_pfit.enabled) {
12776 PIPE_CONF_CHECK_X(pch_pfit.pos);
12777 PIPE_CONF_CHECK_X(pch_pfit.size);
12778 }
12779
12780 PIPE_CONF_CHECK_I(scaler_state.scaler_id);
12781 PIPE_CONF_CHECK_CLOCK_FUZZY(pixel_rate);
12782
12783 PIPE_CONF_CHECK_X(gamma_mode);
12784 if (IS_CHERRYVIEW(dev_priv))
12785 PIPE_CONF_CHECK_X(cgm_mode);
12786 else
12787 PIPE_CONF_CHECK_X(csc_mode);
12788 PIPE_CONF_CHECK_BOOL(gamma_enable);
12789 PIPE_CONF_CHECK_BOOL(csc_enable);
12790 }
12791
12792 PIPE_CONF_CHECK_BOOL(double_wide);
12793
12794 PIPE_CONF_CHECK_P(shared_dpll);
12795 PIPE_CONF_CHECK_X(dpll_hw_state.dpll);
12796 PIPE_CONF_CHECK_X(dpll_hw_state.dpll_md);
12797 PIPE_CONF_CHECK_X(dpll_hw_state.fp0);
12798 PIPE_CONF_CHECK_X(dpll_hw_state.fp1);
12799 PIPE_CONF_CHECK_X(dpll_hw_state.wrpll);
12800 PIPE_CONF_CHECK_X(dpll_hw_state.spll);
12801 PIPE_CONF_CHECK_X(dpll_hw_state.ctrl1);
12802 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr1);
12803 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr2);
12804 PIPE_CONF_CHECK_X(dpll_hw_state.cfgcr0);
12805 PIPE_CONF_CHECK_X(dpll_hw_state.ebb0);
12806 PIPE_CONF_CHECK_X(dpll_hw_state.ebb4);
12807 PIPE_CONF_CHECK_X(dpll_hw_state.pll0);
12808 PIPE_CONF_CHECK_X(dpll_hw_state.pll1);
12809 PIPE_CONF_CHECK_X(dpll_hw_state.pll2);
12810 PIPE_CONF_CHECK_X(dpll_hw_state.pll3);
12811 PIPE_CONF_CHECK_X(dpll_hw_state.pll6);
12812 PIPE_CONF_CHECK_X(dpll_hw_state.pll8);
12813 PIPE_CONF_CHECK_X(dpll_hw_state.pll9);
12814 PIPE_CONF_CHECK_X(dpll_hw_state.pll10);
12815 PIPE_CONF_CHECK_X(dpll_hw_state.pcsdw12);
12816 PIPE_CONF_CHECK_X(dpll_hw_state.mg_refclkin_ctl);
12817 PIPE_CONF_CHECK_X(dpll_hw_state.mg_clktop2_coreclkctl1);
12818 PIPE_CONF_CHECK_X(dpll_hw_state.mg_clktop2_hsclkctl);
12819 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_div0);
12820 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_div1);
12821 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_lf);
12822 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_frac_lock);
12823 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_ssc);
12824 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_bias);
12825 PIPE_CONF_CHECK_X(dpll_hw_state.mg_pll_tdc_coldst_bias);
12826
12827 PIPE_CONF_CHECK_X(dsi_pll.ctrl);
12828 PIPE_CONF_CHECK_X(dsi_pll.div);
12829
12830 if (IS_G4X(dev_priv) || INTEL_GEN(dev_priv) >= 5)
12831 PIPE_CONF_CHECK_I(pipe_bpp);
12832
12833 PIPE_CONF_CHECK_CLOCK_FUZZY(base.adjusted_mode.crtc_clock);
12834 PIPE_CONF_CHECK_CLOCK_FUZZY(port_clock);
12835
12836 PIPE_CONF_CHECK_I(min_voltage_level);
12837
12838 PIPE_CONF_CHECK_X(infoframes.enable);
12839 PIPE_CONF_CHECK_X(infoframes.gcp);
12840 PIPE_CONF_CHECK_INFOFRAME(avi);
12841 PIPE_CONF_CHECK_INFOFRAME(spd);
12842 PIPE_CONF_CHECK_INFOFRAME(hdmi);
12843 PIPE_CONF_CHECK_INFOFRAME(drm);
12844
12845 #undef PIPE_CONF_CHECK_X
12846 #undef PIPE_CONF_CHECK_I
12847 #undef PIPE_CONF_CHECK_BOOL
12848 #undef PIPE_CONF_CHECK_BOOL_INCOMPLETE
12849 #undef PIPE_CONF_CHECK_P
12850 #undef PIPE_CONF_CHECK_FLAGS
12851 #undef PIPE_CONF_CHECK_CLOCK_FUZZY
12852 #undef PIPE_CONF_QUIRK
12853
12854 return ret;
12855 }
12856
/*
 * Cross-check PCH-encoder configs: the dotclock implied by the FDI
 * link m/n values must (fuzzily) agree with the encoder-computed
 * crtc_clock.
 */
static void intel_pipe_config_sanity_check(struct drm_i915_private *dev_priv,
					   const struct intel_crtc_state *pipe_config)
{
	if (pipe_config->has_pch_encoder) {
		int fdi_dotclock = intel_dotclock_calculate(intel_fdi_link_freq(dev_priv, pipe_config),
							    &pipe_config->fdi_m_n);
		int dotclock = pipe_config->base.adjusted_mode.crtc_clock;

		/*
		 * The m/n encoding loses some precision, so only warn
		 * when the clocks differ beyond the fuzzy tolerance.
		 */
		WARN(!intel_fuzzy_clock_check(fdi_dotclock, dotclock),
		     "FDI dotclock and encoder dotclock mismatch, fdi: %i, encoder: %i\n",
		     fdi_dotclock, dotclock);
	}
}
12874
/*
 * Read back the watermark and DDB allocation state from hardware and
 * compare it against the committed sw state for @crtc, logging an
 * error for every discrepancy.  Only applicable on gen9+ with an
 * active crtc.
 */
static void verify_wm_state(struct intel_crtc *crtc,
			    struct intel_crtc_state *new_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	/* Heap-allocated scratch for the hw readout (too big for the stack). */
	struct skl_hw_state {
		struct skl_ddb_entry ddb_y[I915_MAX_PLANES];
		struct skl_ddb_entry ddb_uv[I915_MAX_PLANES];
		struct skl_ddb_allocation ddb;
		struct skl_pipe_wm wm;
	} *hw;
	struct skl_ddb_allocation *sw_ddb;
	struct skl_pipe_wm *sw_wm;
	struct skl_ddb_entry *hw_ddb_entry, *sw_ddb_entry;
	const enum pipe pipe = crtc->pipe;
	int plane, level, max_level = ilk_wm_max_level(dev_priv);

	if (INTEL_GEN(dev_priv) < 9 || !new_crtc_state->base.active)
		return;

	hw = kzalloc(sizeof(*hw), GFP_KERNEL);
	if (!hw)
		return;

	skl_pipe_wm_get_hw_state(crtc, &hw->wm);
	sw_wm = &new_crtc_state->wm.skl.optimal;

	skl_pipe_ddb_get_hw_state(crtc, hw->ddb_y, hw->ddb_uv);

	skl_ddb_get_hw_state(dev_priv, &hw->ddb);
	sw_ddb = &dev_priv->wm.skl_hw.ddb;

	/* DBUF slice count is only tracked on gen11+. */
	if (INTEL_GEN(dev_priv) >= 11 &&
	    hw->ddb.enabled_slices != sw_ddb->enabled_slices)
		DRM_ERROR("mismatch in DBUF Slices (expected %u, got %u)\n",
			  sw_ddb->enabled_slices,
			  hw->ddb.enabled_slices);

	/* Verify watermarks and DDB entries for each universal plane. */
	for_each_universal_plane(dev_priv, pipe, plane) {
		struct skl_plane_wm *hw_plane_wm, *sw_plane_wm;

		hw_plane_wm = &hw->wm.planes[plane];
		sw_plane_wm = &sw_wm->planes[plane];

		/* Watermarks, per level. */
		for (level = 0; level <= max_level; level++) {
			if (skl_wm_level_equals(&hw_plane_wm->wm[level],
						&sw_plane_wm->wm[level]))
				continue;

			DRM_ERROR("mismatch in WM pipe %c plane %d level %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				  pipe_name(pipe), plane + 1, level,
				  sw_plane_wm->wm[level].plane_en,
				  sw_plane_wm->wm[level].plane_res_b,
				  sw_plane_wm->wm[level].plane_res_l,
				  hw_plane_wm->wm[level].plane_en,
				  hw_plane_wm->wm[level].plane_res_b,
				  hw_plane_wm->wm[level].plane_res_l);
		}

		/* Transition watermark. */
		if (!skl_wm_level_equals(&hw_plane_wm->trans_wm,
					 &sw_plane_wm->trans_wm)) {
			DRM_ERROR("mismatch in trans WM pipe %c plane %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				  pipe_name(pipe), plane + 1,
				  sw_plane_wm->trans_wm.plane_en,
				  sw_plane_wm->trans_wm.plane_res_b,
				  sw_plane_wm->trans_wm.plane_res_l,
				  hw_plane_wm->trans_wm.plane_en,
				  hw_plane_wm->trans_wm.plane_res_b,
				  hw_plane_wm->trans_wm.plane_res_l);
		}

		/* DDB allocation (Y/packed plane). */
		hw_ddb_entry = &hw->ddb_y[plane];
		sw_ddb_entry = &new_crtc_state->wm.skl.plane_ddb_y[plane];

		if (!skl_ddb_entry_equal(hw_ddb_entry, sw_ddb_entry)) {
			DRM_ERROR("mismatch in DDB state pipe %c plane %d (expected (%u,%u), found (%u,%u))\n",
				  pipe_name(pipe), plane + 1,
				  sw_ddb_entry->start, sw_ddb_entry->end,
				  hw_ddb_entry->start, hw_ddb_entry->end);
		}
	}

	/*
	 * Cursor plane: same checks as above, but PLANE_CURSOR is not
	 * covered by for_each_universal_plane(), hence the separate
	 * block (kept inside "if (1)" to scope its locals).
	 */
	if (1) {
		struct skl_plane_wm *hw_plane_wm, *sw_plane_wm;

		hw_plane_wm = &hw->wm.planes[PLANE_CURSOR];
		sw_plane_wm = &sw_wm->planes[PLANE_CURSOR];

		/* Watermarks, per level. */
		for (level = 0; level <= max_level; level++) {
			if (skl_wm_level_equals(&hw_plane_wm->wm[level],
						&sw_plane_wm->wm[level]))
				continue;

			DRM_ERROR("mismatch in WM pipe %c cursor level %d (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				  pipe_name(pipe), level,
				  sw_plane_wm->wm[level].plane_en,
				  sw_plane_wm->wm[level].plane_res_b,
				  sw_plane_wm->wm[level].plane_res_l,
				  hw_plane_wm->wm[level].plane_en,
				  hw_plane_wm->wm[level].plane_res_b,
				  hw_plane_wm->wm[level].plane_res_l);
		}

		if (!skl_wm_level_equals(&hw_plane_wm->trans_wm,
					 &sw_plane_wm->trans_wm)) {
			DRM_ERROR("mismatch in trans WM pipe %c cursor (expected e=%d b=%u l=%u, got e=%d b=%u l=%u)\n",
				  pipe_name(pipe),
				  sw_plane_wm->trans_wm.plane_en,
				  sw_plane_wm->trans_wm.plane_res_b,
				  sw_plane_wm->trans_wm.plane_res_l,
				  hw_plane_wm->trans_wm.plane_en,
				  hw_plane_wm->trans_wm.plane_res_b,
				  hw_plane_wm->trans_wm.plane_res_l);
		}

		/* Cursor DDB allocation. */
		hw_ddb_entry = &hw->ddb_y[PLANE_CURSOR];
		sw_ddb_entry = &new_crtc_state->wm.skl.plane_ddb_y[PLANE_CURSOR];

		if (!skl_ddb_entry_equal(hw_ddb_entry, sw_ddb_entry)) {
			DRM_ERROR("mismatch in DDB state pipe %c cursor (expected (%u,%u), found (%u,%u))\n",
				  pipe_name(pipe),
				  sw_ddb_entry->start, sw_ddb_entry->end,
				  hw_ddb_entry->start, hw_ddb_entry->end);
		}
	}

	kfree(hw);
}
13013
/*
 * Verify each connector state tied to @crtc (or, when @crtc is NULL,
 * connectors with no crtc) against its legacy encoder pointer.
 */
static void
verify_connector_state(struct intel_atomic_state *state,
		       struct intel_crtc *crtc)
{
	struct drm_connector *connector;
	struct drm_connector_state *new_conn_state;
	int i;

	for_each_new_connector_in_state(&state->base, connector, new_conn_state, i) {
		struct drm_encoder *encoder = connector->encoder;
		struct intel_crtc_state *crtc_state = NULL;

		if (new_conn_state->crtc != &crtc->base)
			continue;

		/* @crtc may be NULL (disabled-state verification). */
		if (crtc)
			crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		intel_connector_verify_state(crtc_state, new_conn_state);

		I915_STATE_WARN(new_conn_state->best_encoder != encoder,
		     "connector's atomic encoder doesn't match legacy encoder\n");
	}
}
13038
/*
 * Verify each encoder's sw enable state against the connectors in
 * @state and against its hardware state.  Only encoders referenced by
 * at least one connector in the commit (old or new) are checked.
 */
static void
verify_encoder_state(struct drm_i915_private *dev_priv, struct intel_atomic_state *state)
{
	struct intel_encoder *encoder;
	struct drm_connector *connector;
	struct drm_connector_state *old_conn_state, *new_conn_state;
	int i;

	for_each_intel_encoder(&dev_priv->drm, encoder) {
		bool enabled = false, found = false;
		enum pipe pipe;

		DRM_DEBUG_KMS("[ENCODER:%d:%s]\n",
			      encoder->base.base.id,
			      encoder->base.name);

		/*
		 * "found" = encoder touched by this commit;
		 * "enabled" = some new connector state still uses it.
		 */
		for_each_oldnew_connector_in_state(&state->base, connector, old_conn_state,
						   new_conn_state, i) {
			if (old_conn_state->best_encoder == &encoder->base)
				found = true;

			if (new_conn_state->best_encoder != &encoder->base)
				continue;
			found = enabled = true;

			I915_STATE_WARN(new_conn_state->crtc !=
					encoder->base.crtc,
			     "connector's crtc doesn't match encoder crtc\n");
		}

		if (!found)
			continue;

		I915_STATE_WARN(!!encoder->base.crtc != enabled,
		     "encoder's enabled state mismatch "
		     "(expected %i, found %i)\n",
		     !!encoder->base.crtc, enabled);

		/* A detached encoder must also be off in hardware. */
		if (!encoder->base.crtc) {
			bool active;

			active = encoder->get_hw_state(encoder, &pipe);
			I915_STATE_WARN(active,
			     "encoder detached but still enabled on pipe %c.\n",
			     pipe_name(pipe));
		}
	}
}
13087
/*
 * Read the full pipe config back from hardware and compare it against
 * the committed sw state.  Note: @old_crtc_state is destroyed here and
 * its memory is reused as scratch space for the hw readout.
 */
static void
verify_crtc_state(struct intel_crtc *crtc,
		  struct intel_crtc_state *old_crtc_state,
		  struct intel_crtc_state *new_crtc_state)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder;
	struct intel_crtc_state *pipe_config;
	struct drm_atomic_state *state;
	bool active;

	state = old_crtc_state->base.state;
	__drm_atomic_helper_crtc_destroy_state(&old_crtc_state->base);
	pipe_config = old_crtc_state;
	memset(pipe_config, 0, sizeof(*pipe_config));
	pipe_config->base.crtc = &crtc->base;
	pipe_config->base.state = state;

	DRM_DEBUG_KMS("[CRTC:%d:%s]\n", crtc->base.base.id, crtc->base.name);

	active = dev_priv->display.get_pipe_config(crtc, pipe_config);

	/* I830: trust the sw active state instead of the hw readout
	 * (NOTE(review): presumably due to the always-on-pipe quirk —
	 * confirm against the I830 pipe handling elsewhere). */
	if (IS_I830(dev_priv))
		active = new_crtc_state->base.active;

	I915_STATE_WARN(new_crtc_state->base.active != active,
	     "crtc active state doesn't match with hw state "
	     "(expected %i, found %i)\n", new_crtc_state->base.active, active);

	I915_STATE_WARN(crtc->active != new_crtc_state->base.active,
	     "transitional active state does not match atomic hw state "
	     "(expected %i, found %i)\n", new_crtc_state->base.active, crtc->active);

	for_each_encoder_on_crtc(dev, &crtc->base, encoder) {
		enum pipe pipe;

		active = encoder->get_hw_state(encoder, &pipe);
		I915_STATE_WARN(active != new_crtc_state->base.active,
			"[ENCODER:%i] active %i with crtc active %i\n",
			encoder->base.base.id, active, new_crtc_state->base.active);

		I915_STATE_WARN(active && crtc->pipe != pipe,
				"Encoder connected to wrong pipe %c\n",
				pipe_name(pipe));

		/* Fold each active encoder's hw config into the readout. */
		if (active)
			encoder->get_config(encoder, pipe_config);
	}

	intel_crtc_compute_pixel_rate(pipe_config);

	if (!new_crtc_state->base.active)
		return;

	intel_pipe_config_sanity_check(dev_priv, pipe_config);

	/* Full (non-fastset) comparison of hw vs sw state. */
	if (!intel_pipe_config_compare(new_crtc_state,
				       pipe_config, false)) {
		I915_STATE_WARN(1, "pipe state doesn't match!\n");
		intel_dump_pipe_config(pipe_config, NULL, "[hw state]");
		intel_dump_pipe_config(new_crtc_state, NULL, "[sw state]");
	}
}
13153
/*
 * Assert that each plane's hardware enable state matches its sw
 * state: a plane must be enabled iff it is visible or marked as a
 * slave for another plane's state.
 */
static void
intel_verify_planes(struct intel_atomic_state *state)
{
	struct intel_plane *plane;
	const struct intel_plane_state *plane_state;
	int i;

	for_each_new_intel_plane_in_state(state, plane,
					  plane_state, i)
		assert_plane(plane, plane_state->slave ||
			     plane_state->base.visible);
}
13166
/*
 * Verify one shared DPLL's sw tracking against its hardware state.
 * When @crtc is NULL only the global bookkeeping is checked;
 * otherwise the pll's active/reference masks and hw state are
 * verified against that crtc's new state.
 */
static void
verify_single_dpll_state(struct drm_i915_private *dev_priv,
			 struct intel_shared_dpll *pll,
			 struct intel_crtc *crtc,
			 struct intel_crtc_state *new_crtc_state)
{
	struct intel_dpll_hw_state dpll_hw_state;
	unsigned int crtc_mask;
	bool active;

	memset(&dpll_hw_state, 0, sizeof(dpll_hw_state));

	DRM_DEBUG_KMS("%s\n", pll->info->name);

	active = pll->info->funcs->get_hw_state(dev_priv, pll, &dpll_hw_state);

	/* Always-on plls cannot meaningfully be checked for on/off. */
	if (!(pll->info->flags & INTEL_DPLL_ALWAYS_ON)) {
		I915_STATE_WARN(!pll->on && pll->active_mask,
		     "pll in active use but not on in sw tracking\n");
		I915_STATE_WARN(pll->on && !pll->active_mask,
		     "pll is on but not used by any active crtc\n");
		I915_STATE_WARN(pll->on != active,
		     "pll on state mismatch (expected %i, found %i)\n",
		     pll->on, active);
	}

	if (!crtc) {
		/* Global check only: active users must be a subset of
		 * referencing crtcs. */
		I915_STATE_WARN(pll->active_mask & ~pll->state.crtc_mask,
				"more active pll users than references: %x vs %x\n",
				pll->active_mask, pll->state.crtc_mask);

		return;
	}

	crtc_mask = drm_crtc_mask(&crtc->base);

	/* The crtc's presence in the active mask must track its state. */
	if (new_crtc_state->base.active)
		I915_STATE_WARN(!(pll->active_mask & crtc_mask),
				"pll active mismatch (expected pipe %c in active mask 0x%02x)\n",
				pipe_name(drm_crtc_index(&crtc->base)), pll->active_mask);
	else
		I915_STATE_WARN(pll->active_mask & crtc_mask,
				"pll active mismatch (didn't expect pipe %c in active mask 0x%02x)\n",
				pipe_name(drm_crtc_index(&crtc->base)), pll->active_mask);

	I915_STATE_WARN(!(pll->state.crtc_mask & crtc_mask),
			"pll enabled crtcs mismatch (expected 0x%x in 0x%02x)\n",
			crtc_mask, pll->state.crtc_mask);

	/* When on, the tracked hw state must equal the readout. */
	I915_STATE_WARN(pll->on && memcmp(&pll->state.hw_state,
					  &dpll_hw_state,
					  sizeof(dpll_hw_state)),
			"pll hw state mismatch\n");
}
13221
13222 static void
13223 verify_shared_dpll_state(struct intel_crtc *crtc,
13224 struct intel_crtc_state *old_crtc_state,
13225 struct intel_crtc_state *new_crtc_state)
13226 {
13227 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
13228
13229 if (new_crtc_state->shared_dpll)
13230 verify_single_dpll_state(dev_priv, new_crtc_state->shared_dpll, crtc, new_crtc_state);
13231
13232 if (old_crtc_state->shared_dpll &&
13233 old_crtc_state->shared_dpll != new_crtc_state->shared_dpll) {
13234 unsigned int crtc_mask = drm_crtc_mask(&crtc->base);
13235 struct intel_shared_dpll *pll = old_crtc_state->shared_dpll;
13236
13237 I915_STATE_WARN(pll->active_mask & crtc_mask,
13238 "pll active mismatch (didn't expect pipe %c in active mask)\n",
13239 pipe_name(drm_crtc_index(&crtc->base)));
13240 I915_STATE_WARN(pll->state.crtc_mask & crtc_mask,
13241 "pll enabled crtcs mismatch (found %x in enabled mask)\n",
13242 pipe_name(drm_crtc_index(&crtc->base)));
13243 }
13244 }
13245
13246 static void
13247 intel_modeset_verify_crtc(struct intel_crtc *crtc,
13248 struct intel_atomic_state *state,
13249 struct intel_crtc_state *old_crtc_state,
13250 struct intel_crtc_state *new_crtc_state)
13251 {
13252 if (!needs_modeset(new_crtc_state) && !new_crtc_state->update_pipe)
13253 return;
13254
13255 verify_wm_state(crtc, new_crtc_state);
13256 verify_connector_state(state, crtc);
13257 verify_crtc_state(crtc, old_crtc_state, new_crtc_state);
13258 verify_shared_dpll_state(crtc, old_crtc_state, new_crtc_state);
13259 }
13260
13261 static void
13262 verify_disabled_dpll_state(struct drm_i915_private *dev_priv)
13263 {
13264 int i;
13265
13266 for (i = 0; i < dev_priv->num_shared_dpll; i++)
13267 verify_single_dpll_state(dev_priv, &dev_priv->shared_dplls[i], NULL, NULL);
13268 }
13269
13270 static void
13271 intel_modeset_verify_disabled(struct drm_i915_private *dev_priv,
13272 struct intel_atomic_state *state)
13273 {
13274 verify_encoder_state(dev_priv, state);
13275 verify_connector_state(state, NULL);
13276 verify_disabled_dpll_state(dev_priv);
13277 }
13278
/*
 * Set crtc->scanline_offset, the fixup added to the hw scanline counter
 * so it matches the software notion of the current scanline.
 *
 * NOTE(review): the per-platform values (vtotal-1 on gen2, 2 for DDI
 * HDMI, 1 otherwise) encode hardware counter quirks — confirm against
 * the PRM / original commit messages when modifying.
 */
static void update_scanline_offset(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	if (IS_GEN(dev_priv, 2)) {
		const struct drm_display_mode *adjusted_mode = &crtc_state->base.adjusted_mode;
		int vtotal;

		vtotal = adjusted_mode->crtc_vtotal;
		/* Interlaced modes count field lines, i.e. half of vtotal. */
		if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
			vtotal /= 2;

		crtc->scanline_offset = vtotal - 1;
	} else if (HAS_DDI(dev_priv) &&
		   intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		crtc->scanline_offset = 2;
	} else
		crtc->scanline_offset = 1;
}
13326
13327 static void intel_modeset_clear_plls(struct intel_atomic_state *state)
13328 {
13329 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
13330 struct intel_crtc_state *new_crtc_state;
13331 struct intel_crtc *crtc;
13332 int i;
13333
13334 if (!dev_priv->display.crtc_compute_clock)
13335 return;
13336
13337 for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
13338 if (!needs_modeset(new_crtc_state))
13339 continue;
13340
13341 intel_release_shared_dplls(state, crtc);
13342 }
13343 }
13344
13345
13346
13347
13348
13349
13350
/*
 * Haswell workaround applied when enabling pipes: record in
 * hsw_workaround_pipe which other pipe must be poked when this one is
 * brought up.
 *
 * NOTE(review): the exact hardware rationale is not visible here —
 * confirm against the original workaround commit before changing the
 * pairing logic.
 */
static int haswell_mode_set_planes_workaround(struct intel_atomic_state *state)
{
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	struct intel_crtc_state *first_crtc_state = NULL;
	struct intel_crtc_state *other_crtc_state = NULL;
	enum pipe first_pipe = INVALID_PIPE, enabled_pipe = INVALID_PIPE;
	int i;

	/* Look at up to two pipes being enabled by this commit. */
	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
		if (!crtc_state->base.active ||
		    !needs_modeset(crtc_state))
			continue;

		if (first_crtc_state) {
			other_crtc_state = crtc_state;
			break;
		} else {
			first_crtc_state = crtc_state;
			first_pipe = crtc->pipe;
		}
	}

	/* No pipes being enabled -> nothing to do. */
	if (!first_crtc_state)
		return 0;

	/* Pull all CRTC states into this commit and scan for an
	 * already-enabled pipe that is left untouched. */
	for_each_intel_crtc(state->base.dev, crtc) {
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		crtc_state->hsw_workaround_pipe = INVALID_PIPE;

		if (!crtc_state->base.active ||
		    needs_modeset(crtc_state))
			continue;

		/* More than one untouched enabled pipe: workaround not
		 * needed. */
		if (enabled_pipe != INVALID_PIPE)
			return 0;

		enabled_pipe = crtc->pipe;
	}

	if (enabled_pipe != INVALID_PIPE)
		first_crtc_state->hsw_workaround_pipe = enabled_pipe;
	else if (other_crtc_state)
		other_crtc_state->hsw_workaround_pipe = first_pipe;

	return 0;
}
13405
13406 static int intel_lock_all_pipes(struct intel_atomic_state *state)
13407 {
13408 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
13409 struct intel_crtc *crtc;
13410
13411
13412 for_each_intel_crtc(&dev_priv->drm, crtc) {
13413 struct intel_crtc_state *crtc_state;
13414
13415 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
13416 if (IS_ERR(crtc_state))
13417 return PTR_ERR(crtc_state);
13418 }
13419
13420 return 0;
13421 }
13422
13423 static int intel_modeset_all_pipes(struct intel_atomic_state *state)
13424 {
13425 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
13426 struct intel_crtc *crtc;
13427
13428
13429
13430
13431
13432 for_each_intel_crtc(&dev_priv->drm, crtc) {
13433 struct intel_crtc_state *crtc_state;
13434 int ret;
13435
13436 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
13437 if (IS_ERR(crtc_state))
13438 return PTR_ERR(crtc_state);
13439
13440 if (!crtc_state->base.active || needs_modeset(crtc_state))
13441 continue;
13442
13443 crtc_state->base.mode_changed = true;
13444
13445 ret = drm_atomic_add_affected_connectors(&state->base,
13446 &crtc->base);
13447 if (ret)
13448 return ret;
13449
13450 ret = drm_atomic_add_affected_planes(&state->base,
13451 &crtc->base);
13452 if (ret)
13453 return ret;
13454 }
13455
13456 return 0;
13457 }
13458
/*
 * Global checks run when at least one CRTC needs a full modeset:
 * digital port conflicts, active-CRTC bookkeeping, cdclk recomputation
 * and the decision of how disruptive the cdclk change must be.
 *
 * Returns 0 on success or a negative error code.
 */
static int intel_modeset_checks(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;
	int ret = 0, i;

	if (!check_digital_port_conflicts(state)) {
		DRM_DEBUG_KMS("rejecting conflicting digital port configuration\n");
		return -EINVAL;
	}

	/* Keep the current forced-minimum cdclk unless this commit
	 * explicitly changed it. */
	if (!state->cdclk.force_min_cdclk_changed)
		state->cdclk.force_min_cdclk = dev_priv->cdclk.force_min_cdclk;

	state->modeset = true;
	state->active_crtcs = dev_priv->active_crtcs;
	state->cdclk.logical = dev_priv->cdclk.logical;
	state->cdclk.actual = dev_priv->cdclk.actual;
	state->cdclk.pipe = INVALID_PIPE;

	/* Recompute the active-CRTC mask and note which pipes toggled. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->base.active)
			state->active_crtcs |= 1 << i;
		else
			state->active_crtcs &= ~(1 << i);

		if (old_crtc_state->base.active != new_crtc_state->base.active)
			state->active_pipe_changes |= drm_crtc_mask(&crtc->base);
	}

	/*
	 * Recalculate cdclk and decide whether the change can be done
	 * with only one pipe serialized (cd2x update) or requires a
	 * full modeset of all pipes.
	 */
	if (dev_priv->display.modeset_calc_cdclk) {
		enum pipe pipe;

		ret = dev_priv->display.modeset_calc_cdclk(state);
		if (ret < 0)
			return ret;

		/*
		 * A change in the logical cdclk must serialize against
		 * all pipes even if the actual hw frequency is
		 * unchanged.
		 */
		if (intel_cdclk_changed(&dev_priv->cdclk.logical,
					&state->cdclk.logical)) {
			ret = intel_lock_all_pipes(state);
			if (ret < 0)
				return ret;
		}

		/* A single active pipe (power of two mask) that is not
		 * itself doing a modeset is a candidate for the less
		 * disruptive cd2x update. */
		if (is_power_of_2(state->active_crtcs)) {
			struct intel_crtc *crtc;
			struct intel_crtc_state *crtc_state;

			pipe = ilog2(state->active_crtcs);
			crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
			crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
			if (crtc_state && needs_modeset(crtc_state))
				pipe = INVALID_PIPE;
		} else {
			pipe = INVALID_PIPE;
		}

		/* Either lock all pipes for a cd2x update, or force a
		 * modeset of everything when the hw cdclk must change. */
		if (pipe != INVALID_PIPE &&
		    intel_cdclk_needs_cd2x_update(dev_priv,
						  &dev_priv->cdclk.actual,
						  &state->cdclk.actual)) {
			ret = intel_lock_all_pipes(state);
			if (ret < 0)
				return ret;

			state->cdclk.pipe = pipe;
		} else if (intel_cdclk_needs_modeset(&dev_priv->cdclk.actual,
						     &state->cdclk.actual)) {
			ret = intel_modeset_all_pipes(state);
			if (ret < 0)
				return ret;

			state->cdclk.pipe = INVALID_PIPE;
		}

		DRM_DEBUG_KMS("New cdclk calculated to be logical %u kHz, actual %u kHz\n",
			      state->cdclk.logical.cdclk,
			      state->cdclk.actual.cdclk);
		DRM_DEBUG_KMS("New voltage level calculated to be logical %u, actual %u\n",
			      state->cdclk.logical.voltage_level,
			      state->cdclk.actual.voltage_level);
	}

	intel_modeset_clear_plls(state);

	if (IS_HASWELL(dev_priv))
		return haswell_mode_set_planes_workaround(state);

	return 0;
}
13565
13566
13567
13568
13569
13570
13571 static int calc_watermark_data(struct intel_atomic_state *state)
13572 {
13573 struct drm_device *dev = state->base.dev;
13574 struct drm_i915_private *dev_priv = to_i915(dev);
13575
13576
13577 if (dev_priv->display.compute_global_watermarks)
13578 return dev_priv->display.compute_global_watermarks(state);
13579
13580 return 0;
13581 }
13582
/*
 * Downgrade a full modeset to a fastset when the old and new pipe
 * configs are equivalent under the fastset comparison rules.
 */
static void intel_crtc_check_fastset(const struct intel_crtc_state *old_crtc_state,
				     struct intel_crtc_state *new_crtc_state)
{
	if (!intel_pipe_config_compare(old_crtc_state, new_crtc_state, true))
		return;

	new_crtc_state->base.mode_changed = false;
	new_crtc_state->update_pipe = true;

	/*
	 * Carry over the currently-programmed link M/N (and DRRS) values
	 * so the fastset doesn't clobber what the hardware is using.
	 * NOTE(review): presumably because the fastset compare treats
	 * these as equivalent even when recomputed values differ —
	 * confirm against intel_pipe_config_compare().
	 */
	new_crtc_state->fdi_m_n = old_crtc_state->fdi_m_n;
	new_crtc_state->dp_m_n = old_crtc_state->dp_m_n;
	new_crtc_state->dp_m2_n2 = old_crtc_state->dp_m2_n2;
	new_crtc_state->has_drrs = old_crtc_state->has_drrs;
}
13605
13606
13607
13608
13609
13610
/*
 * i915's drm_mode_config_funcs.atomic_check implementation: validate a
 * candidate atomic state, compute derived pipe configs, run global
 * modeset/cdclk/watermark/bandwidth checks and dump the result.
 *
 * Returns 0 if the state is committable, negative error otherwise.
 */
static int intel_atomic_check(struct drm_device *dev,
			      struct drm_atomic_state *_state)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;
	int ret, i;
	bool any_ms = state->cdclk.force_min_cdclk_changed;

	/* A change in mode private flags forces a full modeset. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->base.mode.private_flags !=
		    old_crtc_state->base.mode.private_flags)
			new_crtc_state->base.mode_changed = true;
	}

	ret = drm_atomic_helper_check_modeset(dev, &state->base);
	if (ret)
		goto fail;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state))
			continue;

		/* A CRTC being disabled needs no pipe config, but still
		 * counts as a modeset. */
		if (!new_crtc_state->base.enable) {
			any_ms = true;
			continue;
		}

		ret = intel_modeset_pipe_config(new_crtc_state);
		if (ret)
			goto fail;

		/* May downgrade the modeset to a fastset. */
		intel_crtc_check_fastset(old_crtc_state, new_crtc_state);

		if (needs_modeset(new_crtc_state))
			any_ms = true;
	}

	ret = drm_dp_mst_atomic_check(&state->base);
	if (ret)
		goto fail;

	if (any_ms) {
		ret = intel_modeset_checks(state);
		if (ret)
			goto fail;
	} else {
		state->cdclk.logical = dev_priv->cdclk.logical;
	}

	ret = icl_add_linked_planes(state);
	if (ret)
		goto fail;

	ret = drm_atomic_helper_check_planes(dev, &state->base);
	if (ret)
		goto fail;

	intel_fbc_choose_crtc(dev_priv, state);
	ret = calc_watermark_data(state);
	if (ret)
		goto fail;

	ret = intel_bw_atomic_check(state);
	if (ret)
		goto fail;

	/* Dump the configs of all pipes this commit will touch. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (!needs_modeset(new_crtc_state) &&
		    !new_crtc_state->update_pipe)
			continue;

		intel_dump_pipe_config(new_crtc_state, state,
				       needs_modeset(new_crtc_state) ?
				       "[modeset]" : "[fastset]");
	}

	return 0;

 fail:
	/* -EDEADLK means the caller will back off and retry; not a
	 * real failure, so don't dump anything. */
	if (ret == -EDEADLK)
		return ret;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i)
		intel_dump_pipe_config(new_crtc_state, state, "[failed]");

	return ret;
}
13709
13710 static int intel_atomic_prepare_commit(struct intel_atomic_state *state)
13711 {
13712 return drm_atomic_helper_prepare_planes(state->base.dev,
13713 &state->base);
13714 }
13715
13716 u32 intel_crtc_get_vblank_counter(struct intel_crtc *crtc)
13717 {
13718 struct drm_device *dev = crtc->base.dev;
13719 struct drm_vblank_crtc *vblank = &dev->vblank[drm_crtc_index(&crtc->base)];
13720
13721 if (!vblank->max_vblank_count)
13722 return (u32)drm_crtc_accurate_vblank_count(&crtc->base);
13723
13724 return crtc->base.funcs->get_vblank_counter(&crtc->base);
13725 }
13726
/*
 * Enable or update one CRTC during the commit phase: full enable for a
 * modeset, or LUT/encoder/FBC/plane updates for a fastset/plane-only
 * commit. The statement order here is hardware-mandated; keep it.
 */
static void intel_update_crtc(struct intel_crtc *crtc,
			      struct intel_atomic_state *state,
			      struct intel_crtc_state *old_crtc_state,
			      struct intel_crtc_state *new_crtc_state)
{
	struct drm_device *dev = state->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	bool modeset = needs_modeset(new_crtc_state);
	struct intel_plane_state *new_plane_state =
		intel_atomic_get_new_plane_state(state,
						 to_intel_plane(crtc->base.primary));

	if (modeset) {
		update_scanline_offset(new_crtc_state);
		dev_priv->display.crtc_enable(new_crtc_state, state);

		/* vblanks work again, re-enable pipe CRC. */
		intel_crtc_enable_pipe_crc(crtc);
	} else {
		/* Preload LUTs before the pipe update when requested. */
		if (new_crtc_state->preload_luts &&
		    (new_crtc_state->base.color_mgmt_changed ||
		     new_crtc_state->update_pipe))
			intel_color_load_luts(new_crtc_state);

		intel_pre_plane_update(old_crtc_state, new_crtc_state);

		if (new_crtc_state->update_pipe)
			intel_encoders_update_pipe(crtc, new_crtc_state, state);
	}

	if (new_crtc_state->update_pipe && !new_crtc_state->enable_fbc)
		intel_fbc_disable(crtc);
	else if (new_plane_state)
		intel_fbc_enable(crtc, new_crtc_state, new_plane_state);

	/* Plane updates are bracketed by begin/finish (vblank evasion). */
	intel_begin_crtc_commit(state, crtc);

	if (INTEL_GEN(dev_priv) >= 9)
		skl_update_planes_on_crtc(state, crtc);
	else
		i9xx_update_planes_on_crtc(state, crtc);

	intel_finish_crtc_commit(state, crtc);
}
13771
13772 static void intel_update_crtcs(struct intel_atomic_state *state)
13773 {
13774 struct intel_crtc *crtc;
13775 struct intel_crtc_state *old_crtc_state, *new_crtc_state;
13776 int i;
13777
13778 for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
13779 if (!new_crtc_state->base.active)
13780 continue;
13781
13782 intel_update_crtc(crtc, state, old_crtc_state,
13783 new_crtc_state);
13784 }
13785 }
13786
/*
 * SKL+ CRTC update path: updates pipes in an order that guarantees the
 * DDB (display buffer) allocations of different pipes never overlap in
 * between individual CRTC updates, inserting vblank waits where a new
 * allocation must take effect before the next pipe can proceed.
 */
static void skl_update_crtcs(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	unsigned int updated = 0;
	bool progress;
	enum pipe pipe;
	int i;
	u8 hw_enabled_slices = dev_priv->wm.skl_hw.ddb.enabled_slices;
	u8 required_slices = state->wm_results.ddb.enabled_slices;
	struct skl_ddb_entry entries[I915_MAX_PIPES] = {};

	/* Seed with the old allocations of pipes that stay active. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i)
		if (new_crtc_state->base.active)
			entries[i] = old_crtc_state->wm.skl.ddb;

	/* Enable additional DBuf slices (ICL+) before growing usage. */
	if (INTEL_GEN(dev_priv) >= 11 && required_slices > hw_enabled_slices)
		icl_dbuf_slices_update(dev_priv, required_slices);

	/*
	 * Repeatedly sweep the pipes, updating any pipe whose new DDB
	 * allocation no longer overlaps the remaining old allocations,
	 * until no forward progress is possible.
	 */
	do {
		progress = false;

		for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
			bool vbl_wait = false;
			unsigned int cmask = drm_crtc_mask(&crtc->base);

			pipe = crtc->pipe;

			if (updated & cmask || !new_crtc_state->base.active)
				continue;

			/* Still overlaps someone else's allocation: retry
			 * on a later sweep. */
			if (skl_ddb_allocation_overlaps(&new_crtc_state->wm.skl.ddb,
							entries,
							INTEL_INFO(dev_priv)->num_pipes, i))
				continue;

			updated |= cmask;
			entries[i] = new_crtc_state->wm.skl.ddb;

			/*
			 * An already-active pipe whose DDB changed, and
			 * which isn't the last pipe pending, must wait a
			 * vblank so its new allocation is latched before
			 * the next pipe is updated.
			 */
			if (!skl_ddb_entry_equal(&new_crtc_state->wm.skl.ddb,
						 &old_crtc_state->wm.skl.ddb) &&
			    !new_crtc_state->base.active_changed &&
			    state->wm_results.dirty_pipes != updated)
				vbl_wait = true;

			intel_update_crtc(crtc, state, old_crtc_state,
					  new_crtc_state);

			if (vbl_wait)
				intel_wait_for_vblank(dev_priv, pipe);

			progress = true;
		}
	} while (progress);

	/* Shrink DBuf slice usage only after all pipes are updated. */
	if (INTEL_GEN(dev_priv) >= 11 && required_slices < hw_enabled_slices)
		icl_dbuf_slices_update(dev_priv, required_slices);
}
13861
13862 static void intel_atomic_helper_free_state(struct drm_i915_private *dev_priv)
13863 {
13864 struct intel_atomic_state *state, *next;
13865 struct llist_node *freed;
13866
13867 freed = llist_del_all(&dev_priv->atomic_helper.free_list);
13868 llist_for_each_entry_safe(state, next, freed, freed)
13869 drm_atomic_state_put(&state->base);
13870 }
13871
13872 static void intel_atomic_helper_free_state_worker(struct work_struct *work)
13873 {
13874 struct drm_i915_private *dev_priv =
13875 container_of(work, typeof(*dev_priv), atomic_helper.free_work);
13876
13877 intel_atomic_helper_free_state(dev_priv);
13878 }
13879
/*
 * Sleep until the commit's sw fence signals OR a GPU-reset modeset is
 * pending, whichever comes first. Waits on two wait queues at once;
 * the manual prepare/schedule/finish dance is required because
 * wait_event() can only handle a single queue.
 */
static void intel_atomic_commit_fence_wait(struct intel_atomic_state *intel_state)
{
	struct wait_queue_entry wait_fence, wait_reset;
	struct drm_i915_private *dev_priv = to_i915(intel_state->base.dev);

	init_wait_entry(&wait_fence, 0);
	init_wait_entry(&wait_reset, 0);
	for (;;) {
		prepare_to_wait(&intel_state->commit_ready.wait,
				&wait_fence, TASK_UNINTERRUPTIBLE);
		prepare_to_wait(bit_waitqueue(&dev_priv->gt.reset.flags,
					      I915_RESET_MODESET),
				&wait_reset, TASK_UNINTERRUPTIBLE);

		/* Re-check both conditions after queueing to avoid a
		 * lost-wakeup race. */
		if (i915_sw_fence_done(&intel_state->commit_ready) ||
		    test_bit(I915_RESET_MODESET, &dev_priv->gt.reset.flags))
			break;

		schedule();
	}
	finish_wait(&intel_state->commit_ready.wait, &wait_fence);
	finish_wait(bit_waitqueue(&dev_priv->gt.reset.flags,
				  I915_RESET_MODESET),
		    &wait_reset);
}
13906
13907 static void intel_atomic_cleanup_work(struct work_struct *work)
13908 {
13909 struct drm_atomic_state *state =
13910 container_of(work, struct drm_atomic_state, commit_work);
13911 struct drm_i915_private *i915 = to_i915(state->dev);
13912
13913 drm_atomic_helper_cleanup_planes(&i915->drm, state);
13914 drm_atomic_helper_commit_cleanup_done(state);
13915 drm_atomic_state_put(state);
13916
13917 intel_atomic_helper_free_state(i915);
13918 }
13919
/*
 * The hardware phase of an atomic commit. Runs after the state swap,
 * either directly or from a worker. The ordering here (disable old
 * pipes, cdclk pre-update, enable/update pipes, flip-done wait, LUTs,
 * watermarks, verification, cdclk post-update, cleanup) is strictly
 * hardware-mandated — do not reorder.
 */
static void intel_atomic_commit_tail(struct intel_atomic_state *state)
{
	struct drm_device *dev = state->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_state *new_crtc_state, *old_crtc_state;
	struct intel_crtc *crtc;
	u64 put_domains[I915_MAX_PIPES] = {};
	intel_wakeref_t wakeref = 0;
	int i;

	/* Wait for plane fences (or a pending GPU-reset modeset). */
	intel_atomic_commit_fence_wait(state);

	drm_atomic_helper_wait_for_dependencies(&state->base);

	if (state->modeset)
		wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_MODESET);

	/* Phase 1: grab power domains and disable pipes doing a modeset. */
	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		if (needs_modeset(new_crtc_state) ||
		    new_crtc_state->update_pipe) {

			put_domains[crtc->pipe] =
				modeset_get_crtc_power_domains(new_crtc_state);
		}

		if (!needs_modeset(new_crtc_state))
			continue;

		intel_pre_plane_update(old_crtc_state, new_crtc_state);

		if (old_crtc_state->base.active) {
			intel_crtc_disable_planes(state, crtc);

			/*
			 * Disable pipe CRC before the pipe goes down so
			 * no CRC interrupts fire while it's off.
			 */
			intel_crtc_disable_pipe_crc(crtc);

			dev_priv->display.crtc_disable(old_crtc_state, state);
			crtc->active = false;
			intel_fbc_disable(crtc);
			intel_disable_shared_dpll(old_crtc_state);

			/*
			 * Underruns during the disable sequence are
			 * expected; drain them now so they don't fire
			 * spuriously later.
			 */
			intel_check_cpu_fifo_underruns(dev_priv);
			intel_check_pch_fifo_underruns(dev_priv);

			/* Program initial watermarks for a pipe staying off. */
			if (!new_crtc_state->base.active &&
			    !HAS_GMCH(dev_priv) &&
			    dev_priv->display.initial_watermarks)
				dev_priv->display.initial_watermarks(state,
								     new_crtc_state);
		}
	}

	/* Point each CRTC's cached config at the new state. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i)
		crtc->config = new_crtc_state;

	if (state->modeset) {
		drm_atomic_helper_update_legacy_modeset_state(dev, &state->base);

		/* Raise cdclk (if needed) before enabling anything. */
		intel_set_cdclk_pre_plane_update(dev_priv,
						 &state->cdclk.actual,
						 &dev_priv->cdclk.actual,
						 state->cdclk.pipe);

		/* SAGV must go down before the pipes come up if the new
		 * config can't tolerate it. */
		if (!intel_can_enable_sagv(state))
			intel_disable_sagv(dev_priv);

		intel_modeset_verify_disabled(dev_priv, state);
	}

	/* Complete events for CRTCs that end up disabled — no vblank
	 * will come to deliver them. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		bool modeset = needs_modeset(new_crtc_state);

		if (modeset && !new_crtc_state->base.active && new_crtc_state->base.event) {
			spin_lock_irq(&dev->event_lock);
			drm_crtc_send_vblank_event(&crtc->base, new_crtc_state->base.event);
			spin_unlock_irq(&dev->event_lock);

			new_crtc_state->base.event = NULL;
		}
	}

	if (state->modeset)
		intel_encoders_update_prepare(state);

	/* Phase 2: enable/update all pipes (platform-specific ordering). */
	dev_priv->display.update_crtcs(state);

	if (state->modeset) {
		intel_encoders_update_complete(state);

		/* Lower cdclk only after everything is up. */
		intel_set_cdclk_post_plane_update(dev_priv,
						  &state->cdclk.actual,
						  &dev_priv->cdclk.actual,
						  state->cdclk.pipe);
	}

	/*
	 * Wait for the flips to land before anything that depends on the
	 * new scanout (LUT loads, watermark optimization, unpinning).
	 */
	drm_atomic_helper_wait_for_flip_done(dev, &state->base);

	/* Non-preloaded LUT updates happen after the flip is done. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->base.active &&
		    !needs_modeset(new_crtc_state) &&
		    !new_crtc_state->preload_luts &&
		    (new_crtc_state->base.color_mgmt_changed ||
		     new_crtc_state->update_pipe))
			intel_color_load_luts(new_crtc_state);
	}

	/* Now that the vblank has passed, watermarks can be optimized
	 * down to their final values. */
	for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (dev_priv->display.optimize_watermarks)
			dev_priv->display.optimize_watermarks(state,
							      new_crtc_state);
	}

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		intel_post_plane_update(old_crtc_state);

		if (put_domains[i])
			modeset_put_power_domains(dev_priv, put_domains[i]);

		intel_modeset_verify_crtc(crtc, state, old_crtc_state, new_crtc_state);
	}

	if (state->modeset)
		intel_verify_planes(state);

	if (state->modeset && intel_can_enable_sagv(state))
		intel_enable_sagv(dev_priv);

	drm_atomic_helper_commit_hw_done(&state->base);

	if (state->modeset) {
		/*
		 * Re-arm unclaimed-mmio detection and drop the modeset
		 * power reference taken at the top.
		 */
		intel_uncore_arm_unclaimed_mmio_detection(&dev_priv->uncore);
		intel_display_power_put(dev_priv, POWER_DOMAIN_MODESET, wakeref);
	}
	intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);

	/*
	 * Defer the remaining cleanup (plane unpin, final put) to a
	 * high-priority worker so the commit path returns promptly.
	 */
	INIT_WORK(&state->base.commit_work, intel_atomic_cleanup_work);
	queue_work(system_highpri_wq, &state->base.commit_work);
}
14104
14105 static void intel_atomic_commit_work(struct work_struct *work)
14106 {
14107 struct intel_atomic_state *state =
14108 container_of(work, struct intel_atomic_state, base.commit_work);
14109
14110 intel_atomic_commit_tail(state);
14111 }
14112
/*
 * sw-fence notify callback for the commit-ready fence.
 *
 * FENCE_COMPLETE needs no action here; on FENCE_FREE the state is
 * queued onto a lock-free list and freed from a worker, since this
 * callback may run from a context where freeing directly is unsafe.
 */
static int __i915_sw_fence_call
intel_atomic_commit_ready(struct i915_sw_fence *fence,
			  enum i915_sw_fence_notify notify)
{
	struct intel_atomic_state *state =
		container_of(fence, struct intel_atomic_state, commit_ready);

	switch (notify) {
	case FENCE_COMPLETE:
		/* Nothing to do: the commit path waits on the fence itself. */
		break;
	case FENCE_FREE:
	{
		struct intel_atomic_helper *helper =
			&to_i915(state->base.dev)->atomic_helper;

		/* llist_add returns true if the list was empty: only the
		 * first producer schedules the worker. */
		if (llist_add(&state->freed, &helper->free_list))
			schedule_work(&helper->free_work);
		break;
	}
	}

	return NOTIFY_DONE;
}
14137
14138 static void intel_atomic_track_fbs(struct intel_atomic_state *state)
14139 {
14140 struct intel_plane_state *old_plane_state, *new_plane_state;
14141 struct intel_plane *plane;
14142 int i;
14143
14144 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
14145 new_plane_state, i)
14146 intel_frontbuffer_track(to_intel_frontbuffer(old_plane_state->base.fb),
14147 to_intel_frontbuffer(new_plane_state->base.fb),
14148 plane->frontbuffer_bit);
14149 }
14150
/*
 * i915's drm_mode_config_funcs.atomic_commit implementation.
 *
 * Prepares planes, sets up the commit, swaps the state in, copies the
 * swapped global state into dev_priv, and then runs (or queues) the
 * hardware phase in intel_atomic_commit_tail().
 *
 * Returns 0 on success or a negative error code; on error all
 * references and wakerefs taken here are released.
 */
static int intel_atomic_commit(struct drm_device *dev,
			       struct drm_atomic_state *_state,
			       bool nonblock)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct drm_i915_private *dev_priv = to_i915(dev);
	int ret = 0;

	state->wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);

	drm_atomic_state_get(&state->base);
	i915_sw_fence_init(&state->commit_ready,
			   intel_atomic_commit_ready);

	/*
	 * Legacy cursor updates are normally fast-pathed, but on pre-gen9
	 * a pending watermark update forces the slow path so the vblank
	 * worker can run. NOTE(review): rationale inferred from the
	 * condition below — confirm against the introducing commit.
	 */
	if (INTEL_GEN(dev_priv) < 9 && state->base.legacy_cursor_update) {
		struct intel_crtc_state *new_crtc_state;
		struct intel_crtc *crtc;
		int i;

		for_each_new_intel_crtc_in_state(state, crtc, new_crtc_state, i)
			if (new_crtc_state->wm.need_postvbl_update ||
			    new_crtc_state->update_wm_post)
				state->base.legacy_cursor_update = false;
	}

	ret = intel_atomic_prepare_commit(state);
	if (ret) {
		DRM_DEBUG_ATOMIC("Preparing state failed with %i\n", ret);
		i915_sw_fence_commit(&state->commit_ready);
		intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);
		return ret;
	}

	ret = drm_atomic_helper_setup_commit(&state->base, nonblock);
	if (!ret)
		ret = drm_atomic_helper_swap_state(&state->base, true);

	if (ret) {
		i915_sw_fence_commit(&state->commit_ready);

		drm_atomic_helper_cleanup_planes(dev, &state->base);
		intel_runtime_pm_put(&dev_priv->runtime_pm, state->wakeref);
		return ret;
	}
	/* Point of no return: the state is swapped in from here on. */
	dev_priv->wm.distrust_bios_wm = false;
	intel_shared_dpll_swap_state(state);
	intel_atomic_track_fbs(state);

	/* Copy the swapped-in global (cdclk/voltage) state into dev_priv. */
	if (state->modeset) {
		memcpy(dev_priv->min_cdclk, state->min_cdclk,
		       sizeof(state->min_cdclk));
		memcpy(dev_priv->min_voltage_level, state->min_voltage_level,
		       sizeof(state->min_voltage_level));
		dev_priv->active_crtcs = state->active_crtcs;
		dev_priv->cdclk.force_min_cdclk = state->cdclk.force_min_cdclk;

		intel_cdclk_swap_state(state);
	}

	drm_atomic_state_get(&state->base);
	INIT_WORK(&state->base.commit_work, intel_atomic_commit_work);

	i915_sw_fence_commit(&state->commit_ready);
	if (nonblock && state->modeset) {
		/* Modesets are serialized on their own workqueue. */
		queue_work(dev_priv->modeset_wq, &state->base.commit_work);
	} else if (nonblock) {
		queue_work(system_unbound_wq, &state->base.commit_work);
	} else {
		/* Blocking commit: flush pending modesets, then run inline. */
		if (state->modeset)
			flush_workqueue(dev_priv->modeset_wq);
		intel_atomic_commit_tail(state);
	}

	return 0;
}
14243
/*
 * Wait-queue entry used to boost the GPU frequency for a request whose
 * flip is waiting on a vblank; see add_rps_boost_after_vblank().
 */
struct wait_rps_boost {
	struct wait_queue_entry wait;	/* queued on the crtc vblank waitqueue */

	struct drm_crtc *crtc;		/* crtc whose vblank ref we hold */
	struct i915_request *request;	/* request to boost (holds a ref) */
};
14250
/*
 * Vblank waitqueue callback: boost the GPU clocks for the flip's
 * request, then drop all references and free the entry.
 *
 * Always returns 1 (wake handled); runs at most once since it removes
 * itself from the queue.
 */
static int do_rps_boost(struct wait_queue_entry *_wait,
			unsigned mode, int sync, void *key)
{
	struct wait_rps_boost *wait = container_of(_wait, typeof(*wait), wait);
	struct i915_request *rq = wait->request;

	/*
	 * Only boost requests that haven't started executing yet — a
	 * request already on the GPU gains nothing from a boost.
	 */
	if (!i915_request_started(rq))
		gen6_rps_boost(rq);
	i915_request_put(rq);

	drm_crtc_vblank_put(wait->crtc);

	list_del(&wait->wait.entry);
	kfree(wait);
	return 1;
}
14272
14273 static void add_rps_boost_after_vblank(struct drm_crtc *crtc,
14274 struct dma_fence *fence)
14275 {
14276 struct wait_rps_boost *wait;
14277
14278 if (!dma_fence_is_i915(fence))
14279 return;
14280
14281 if (INTEL_GEN(to_i915(crtc->dev)) < 6)
14282 return;
14283
14284 if (drm_crtc_vblank_get(crtc))
14285 return;
14286
14287 wait = kmalloc(sizeof(*wait), GFP_KERNEL);
14288 if (!wait) {
14289 drm_crtc_vblank_put(crtc);
14290 return;
14291 }
14292
14293 wait->request = to_request(dma_fence_get(fence));
14294 wait->crtc = crtc;
14295
14296 wait->wait.func = do_rps_boost;
14297 wait->wait.flags = 0;
14298
14299 add_wait_queue(drm_crtc_vblank_waitqueue(crtc), &wait->wait);
14300 }
14301
14302 static int intel_plane_pin_fb(struct intel_plane_state *plane_state)
14303 {
14304 struct intel_plane *plane = to_intel_plane(plane_state->base.plane);
14305 struct drm_i915_private *dev_priv = to_i915(plane->base.dev);
14306 struct drm_framebuffer *fb = plane_state->base.fb;
14307 struct i915_vma *vma;
14308
14309 if (plane->id == PLANE_CURSOR &&
14310 INTEL_INFO(dev_priv)->display.cursor_needs_physical) {
14311 struct drm_i915_gem_object *obj = intel_fb_obj(fb);
14312 const int align = intel_cursor_alignment(dev_priv);
14313 int err;
14314
14315 err = i915_gem_object_attach_phys(obj, align);
14316 if (err)
14317 return err;
14318 }
14319
14320 vma = intel_pin_and_fence_fb_obj(fb,
14321 &plane_state->view,
14322 intel_plane_uses_fence(plane_state),
14323 &plane_state->flags);
14324 if (IS_ERR(vma))
14325 return PTR_ERR(vma);
14326
14327 plane_state->vma = vma;
14328
14329 return 0;
14330 }
14331
14332 static void intel_plane_unpin_fb(struct intel_plane_state *old_plane_state)
14333 {
14334 struct i915_vma *vma;
14335
14336 vma = fetch_and_zero(&old_plane_state->vma);
14337 if (vma)
14338 intel_unpin_fb_vma(vma, old_plane_state->flags);
14339 }
14340
14341 static void fb_obj_bump_render_priority(struct drm_i915_gem_object *obj)
14342 {
14343 struct i915_sched_attr attr = {
14344 .priority = I915_PRIORITY_DISPLAY,
14345 };
14346
14347 i915_gem_object_wait_priority(obj, 0, &attr);
14348 }
14349
14350
14351
14352
14353
14354
14355
14356
14357
14358
14359
14360
14361
14362
14363
/*
 * drm_plane_helper_funcs.prepare_fb for i915: pin the new framebuffer,
 * hook the commit's sw fence up to the relevant dma fences/reservations
 * and flush frontbuffer state.
 *
 * The fence-await / pin ordering here is deliberate; be careful when
 * rearranging.
 *
 * Returns 0 on success or a negative error code.
 */
int
intel_prepare_plane_fb(struct drm_plane *plane,
		       struct drm_plane_state *new_state)
{
	struct intel_atomic_state *intel_state =
		to_intel_atomic_state(new_state->state);
	struct drm_i915_private *dev_priv = to_i915(plane->dev);
	struct drm_framebuffer *fb = new_state->fb;
	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
	struct drm_i915_gem_object *old_obj = intel_fb_obj(plane->state->fb);
	int ret;

	if (old_obj) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(intel_state,
							to_intel_crtc(plane->state->crtc));

		/*
		 * For a modeset, wait for outstanding rendering on the
		 * OLD fb too, so the pipe isn't re-enabled over a buffer
		 * that's still being written. NOTE(review): rationale
		 * inferred from the await below — confirm against the
		 * introducing commit.
		 */
		if (needs_modeset(crtc_state)) {
			ret = i915_sw_fence_await_reservation(&intel_state->commit_ready,
							      old_obj->base.resv, NULL,
							      false, 0,
							      GFP_KERNEL);
			if (ret < 0)
				return ret;
		}
	}

	/* An explicit fence supplied by userspace gates the commit. */
	if (new_state->fence) {
		ret = i915_sw_fence_await_dma_fence(&intel_state->commit_ready,
						    new_state->fence,
						    I915_FENCE_TIMEOUT,
						    GFP_KERNEL);
		if (ret < 0)
			return ret;
	}

	/* Plane being disabled: nothing to pin. */
	if (!obj)
		return 0;

	ret = i915_gem_object_pin_pages(obj);
	if (ret)
		return ret;

	/* Pinning the fb vma requires struct_mutex. */
	ret = mutex_lock_interruptible(&dev_priv->drm.struct_mutex);
	if (ret) {
		i915_gem_object_unpin_pages(obj);
		return ret;
	}

	ret = intel_plane_pin_fb(to_intel_plane_state(new_state));

	mutex_unlock(&dev_priv->drm.struct_mutex);
	i915_gem_object_unpin_pages(obj);
	if (ret)
		return ret;

	fb_obj_bump_render_priority(obj);
	intel_frontbuffer_flush(obj->frontbuffer, ORIGIN_DIRTYFB);

	if (!new_state->fence) {
		/*
		 * No explicit fence: implicitly await the object's
		 * reservation, and boost the GPU for the exclusive
		 * fence (if any) once the vblank arrives.
		 */
		struct dma_fence *fence;

		ret = i915_sw_fence_await_reservation(&intel_state->commit_ready,
						      obj->base.resv, NULL,
						      false, I915_FENCE_TIMEOUT,
						      GFP_KERNEL);
		if (ret < 0)
			return ret;

		fence = dma_resv_get_excl_rcu(obj->base.resv);
		if (fence) {
			add_rps_boost_after_vblank(new_state->crtc, fence);
			dma_fence_put(fence);
		}
	} else {
		add_rps_boost_after_vblank(new_state->crtc, new_state->fence);
	}

	/*
	 * Mark RPS interactive for the duration of the commit so clocks
	 * stay responsive while the user is flipping.
	 */
	if (!intel_state->rps_interactive) {
		intel_rps_mark_interactive(dev_priv, true);
		intel_state->rps_interactive = true;
	}

	return 0;
}
14468
14469
14470
14471
14472
14473
14474
14475
14476
14477
14478 void
14479 intel_cleanup_plane_fb(struct drm_plane *plane,
14480 struct drm_plane_state *old_state)
14481 {
14482 struct intel_atomic_state *intel_state =
14483 to_intel_atomic_state(old_state->state);
14484 struct drm_i915_private *dev_priv = to_i915(plane->dev);
14485
14486 if (intel_state->rps_interactive) {
14487 intel_rps_mark_interactive(dev_priv, false);
14488 intel_state->rps_interactive = false;
14489 }
14490
14491
14492 mutex_lock(&dev_priv->drm.struct_mutex);
14493 intel_plane_unpin_fb(to_intel_plane_state(old_state));
14494 mutex_unlock(&dev_priv->drm.struct_mutex);
14495 }
14496
14497 int
14498 skl_max_scale(const struct intel_crtc_state *crtc_state,
14499 u32 pixel_format)
14500 {
14501 struct intel_crtc *crtc = to_intel_crtc(crtc_state->base.crtc);
14502 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
14503 int max_scale, mult;
14504 int crtc_clock, max_dotclk, tmpclk1, tmpclk2;
14505
14506 if (!crtc_state->base.enable)
14507 return DRM_PLANE_HELPER_NO_SCALING;
14508
14509 crtc_clock = crtc_state->base.adjusted_mode.crtc_clock;
14510 max_dotclk = to_intel_atomic_state(crtc_state->base.state)->cdclk.logical.cdclk;
14511
14512 if (IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10)
14513 max_dotclk *= 2;
14514
14515 if (WARN_ON_ONCE(!crtc_clock || max_dotclk < crtc_clock))
14516 return DRM_PLANE_HELPER_NO_SCALING;
14517
14518
14519
14520
14521
14522
14523
14524 mult = is_planar_yuv_format(pixel_format) ? 2 : 3;
14525 tmpclk1 = (1 << 16) * mult - 1;
14526 tmpclk2 = (1 << 8) * ((max_dotclk << 8) / crtc_clock);
14527 max_scale = min(tmpclk1, tmpclk2);
14528
14529 return max_scale;
14530 }
14531
/*
 * Per-crtc work done right before the plane updates of an atomic commit:
 * start the vblank-evasion critical section, and for non-modeset commits
 * also commit color management, pipe config / scaler state and PIPEMISC.
 * Watermarks are updated in both cases.
 */
static void intel_begin_crtc_commit(struct intel_atomic_state *state,
				    struct intel_crtc *crtc)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	struct intel_crtc_state *old_crtc_state =
		intel_atomic_get_old_crtc_state(state, crtc);
	struct intel_crtc_state *new_crtc_state =
		intel_atomic_get_new_crtc_state(state, crtc);
	bool modeset = needs_modeset(new_crtc_state);

	/* Enter the vblank evasion critical section. */
	intel_pipe_update_start(new_crtc_state);

	/* Full modesets program all of this elsewhere; only watermarks here. */
	if (modeset)
		goto out;

	if (new_crtc_state->base.color_mgmt_changed ||
	    new_crtc_state->update_pipe)
		intel_color_commit(new_crtc_state);

	/* Fastsets reprogram the pipe; otherwise just drop unused scalers. */
	if (new_crtc_state->update_pipe)
		intel_update_pipe_config(old_crtc_state, new_crtc_state);
	else if (INTEL_GEN(dev_priv) >= 9)
		skl_detach_scalers(new_crtc_state);

	if (INTEL_GEN(dev_priv) >= 9 || IS_BROADWELL(dev_priv))
		bdw_set_pipemisc(new_crtc_state);

out:
	if (dev_priv->display.atomic_update_watermarks)
		dev_priv->display.atomic_update_watermarks(state,
							   new_crtc_state);
}
14565
14566 void intel_crtc_arm_fifo_underrun(struct intel_crtc *crtc,
14567 struct intel_crtc_state *crtc_state)
14568 {
14569 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
14570
14571 if (!IS_GEN(dev_priv, 2))
14572 intel_set_cpu_fifo_underrun_reporting(dev_priv, crtc->pipe, true);
14573
14574 if (crtc_state->has_pch_encoder) {
14575 enum pipe pch_transcoder =
14576 intel_crtc_pch_transcoder(crtc);
14577
14578 intel_set_pch_fifo_underrun_reporting(dev_priv, pch_transcoder, true);
14579 }
14580 }
14581
14582 static void intel_finish_crtc_commit(struct intel_atomic_state *state,
14583 struct intel_crtc *crtc)
14584 {
14585 struct intel_crtc_state *old_crtc_state =
14586 intel_atomic_get_old_crtc_state(state, crtc);
14587 struct intel_crtc_state *new_crtc_state =
14588 intel_atomic_get_new_crtc_state(state, crtc);
14589
14590 intel_pipe_update_end(new_crtc_state);
14591
14592 if (new_crtc_state->update_pipe &&
14593 !needs_modeset(new_crtc_state) &&
14594 old_crtc_state->base.mode.private_flags & I915_MODE_FLAG_INHERITED)
14595 intel_crtc_arm_fifo_underrun(crtc, new_crtc_state);
14596 }
14597
14598
14599
14600
14601
14602
14603
14604
/**
 * intel_plane_destroy - common destruction function for all planes
 * @plane: plane to destroy
 */
void intel_plane_destroy(struct drm_plane *plane)
{
	drm_plane_cleanup(plane);
	kfree(to_intel_plane(plane));
}
14610
14611 static bool i8xx_plane_format_mod_supported(struct drm_plane *_plane,
14612 u32 format, u64 modifier)
14613 {
14614 switch (modifier) {
14615 case DRM_FORMAT_MOD_LINEAR:
14616 case I915_FORMAT_MOD_X_TILED:
14617 break;
14618 default:
14619 return false;
14620 }
14621
14622 switch (format) {
14623 case DRM_FORMAT_C8:
14624 case DRM_FORMAT_RGB565:
14625 case DRM_FORMAT_XRGB1555:
14626 case DRM_FORMAT_XRGB8888:
14627 return modifier == DRM_FORMAT_MOD_LINEAR ||
14628 modifier == I915_FORMAT_MOD_X_TILED;
14629 default:
14630 return false;
14631 }
14632 }
14633
14634 static bool i965_plane_format_mod_supported(struct drm_plane *_plane,
14635 u32 format, u64 modifier)
14636 {
14637 switch (modifier) {
14638 case DRM_FORMAT_MOD_LINEAR:
14639 case I915_FORMAT_MOD_X_TILED:
14640 break;
14641 default:
14642 return false;
14643 }
14644
14645 switch (format) {
14646 case DRM_FORMAT_C8:
14647 case DRM_FORMAT_RGB565:
14648 case DRM_FORMAT_XRGB8888:
14649 case DRM_FORMAT_XBGR8888:
14650 case DRM_FORMAT_XRGB2101010:
14651 case DRM_FORMAT_XBGR2101010:
14652 return modifier == DRM_FORMAT_MOD_LINEAR ||
14653 modifier == I915_FORMAT_MOD_X_TILED;
14654 default:
14655 return false;
14656 }
14657 }
14658
14659 static bool intel_cursor_format_mod_supported(struct drm_plane *_plane,
14660 u32 format, u64 modifier)
14661 {
14662 return modifier == DRM_FORMAT_MOD_LINEAR &&
14663 format == DRM_FORMAT_ARGB8888;
14664 }
14665
/* drm_plane_funcs for the gen4+ primary plane. */
static const struct drm_plane_funcs i965_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = i965_plane_format_mod_supported,
};
14674
/* drm_plane_funcs for the pre-gen4 primary plane. */
static const struct drm_plane_funcs i8xx_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = i8xx_plane_format_mod_supported,
};
14683
/*
 * Fast path for legacy cursor ioctls: update the cursor plane directly,
 * bypassing the full atomic commit machinery, whenever only the fb and/or
 * position change on an otherwise idle, active crtc. Anything else falls
 * back to a normal atomic plane update.
 */
static int
intel_legacy_cursor_update(struct drm_plane *plane,
			   struct drm_crtc *crtc,
			   struct drm_framebuffer *fb,
			   int crtc_x, int crtc_y,
			   unsigned int crtc_w, unsigned int crtc_h,
			   u32 src_x, u32 src_y,
			   u32 src_w, u32 src_h,
			   struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(crtc->dev);
	struct drm_plane_state *old_plane_state, *new_plane_state;
	struct intel_plane *intel_plane = to_intel_plane(plane);
	struct intel_crtc_state *crtc_state =
		to_intel_crtc_state(crtc->state);
	struct intel_crtc_state *new_crtc_state;
	int ret;

	/*
	 * When the crtc is inactive, or a modeset or fastset is pending,
	 * take the slow path so the update serializes with that commit.
	 */
	if (!crtc_state->base.active || needs_modeset(crtc_state) ||
	    crtc_state->update_pipe)
		goto slow;

	old_plane_state = plane->state;

	/*
	 * Don't do an async update while a commit modifying this plane is
	 * still in flight; that commit's state would be clobbered.
	 */
	if (old_plane_state->commit &&
	    !try_wait_for_completion(&old_plane_state->commit->hw_done))
		goto slow;

	/*
	 * Only fb and position changes are allowed on the fast path; any
	 * change that could affect watermarks (size, visibility, crtc)
	 * must go through the full atomic commit.
	 */
	if (old_plane_state->crtc != crtc ||
	    old_plane_state->src_w != src_w ||
	    old_plane_state->src_h != src_h ||
	    old_plane_state->crtc_w != crtc_w ||
	    old_plane_state->crtc_h != crtc_h ||
	    !old_plane_state->fb != !fb)
		goto slow;

	new_plane_state = intel_plane_duplicate_state(plane);
	if (!new_plane_state)
		return -ENOMEM;

	new_crtc_state = to_intel_crtc_state(intel_crtc_duplicate_state(crtc));
	if (!new_crtc_state) {
		ret = -ENOMEM;
		goto out_free;
	}

	drm_atomic_set_fb_for_plane(new_plane_state, fb);

	new_plane_state->src_x = src_x;
	new_plane_state->src_y = src_y;
	new_plane_state->src_w = src_w;
	new_plane_state->src_h = src_h;
	new_plane_state->crtc_x = crtc_x;
	new_plane_state->crtc_y = crtc_y;
	new_plane_state->crtc_w = crtc_w;
	new_plane_state->crtc_h = crtc_h;

	ret = intel_plane_atomic_check_with_state(crtc_state, new_crtc_state,
						  to_intel_plane_state(old_plane_state),
						  to_intel_plane_state(new_plane_state));
	if (ret)
		goto out_free;

	ret = mutex_lock_interruptible(&dev_priv->drm.struct_mutex);
	if (ret)
		goto out_free;

	ret = intel_plane_pin_fb(to_intel_plane_state(new_plane_state));
	if (ret)
		goto out_unlock;

	intel_frontbuffer_flush(to_intel_frontbuffer(fb), ORIGIN_FLIP);
	intel_frontbuffer_track(to_intel_frontbuffer(old_plane_state->fb),
				to_intel_frontbuffer(fb),
				intel_plane->frontbuffer_bit);

	/* Swap the plane state in place. */
	plane->state = new_plane_state;

	/*
	 * Do NOT swap crtc_state here: it may be in use by a concurrent
	 * atomic commit or page flip, and swapping/destroying it would
	 * cause a use-after-free there. Only active_planes is copied over
	 * for bookkeeping; either state object is eventually garbage
	 * collected.
	 */
	crtc_state->active_planes = new_crtc_state->active_planes;

	if (plane->state->visible)
		intel_update_plane(intel_plane, crtc_state,
				   to_intel_plane_state(plane->state));
	else
		intel_disable_plane(intel_plane, crtc_state);

	intel_plane_unpin_fb(to_intel_plane_state(old_plane_state));

out_unlock:
	mutex_unlock(&dev_priv->drm.struct_mutex);
out_free:
	if (new_crtc_state)
		intel_crtc_destroy_state(crtc, &new_crtc_state->base);
	/* On success the old plane state is what gets freed. */
	if (ret)
		intel_plane_destroy_state(plane, new_plane_state);
	else
		intel_plane_destroy_state(plane, old_plane_state);
	return ret;

slow:
	return drm_atomic_helper_update_plane(plane, crtc, fb,
					      crtc_x, crtc_y, crtc_w, crtc_h,
					      src_x, src_y, src_w, src_h, ctx);
}
14812
/* drm_plane_funcs for the cursor plane; update_plane uses the fast path. */
static const struct drm_plane_funcs intel_cursor_plane_funcs = {
	.update_plane = intel_legacy_cursor_update,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = intel_plane_destroy,
	.atomic_duplicate_state = intel_plane_duplicate_state,
	.atomic_destroy_state = intel_plane_destroy_state,
	.format_mod_supported = intel_cursor_format_mod_supported,
};
14821
14822 static bool i9xx_plane_has_fbc(struct drm_i915_private *dev_priv,
14823 enum i9xx_plane_id i9xx_plane)
14824 {
14825 if (!HAS_FBC(dev_priv))
14826 return false;
14827
14828 if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
14829 return i9xx_plane == PLANE_A;
14830 else if (IS_IVYBRIDGE(dev_priv))
14831 return i9xx_plane == PLANE_A || i9xx_plane == PLANE_B ||
14832 i9xx_plane == PLANE_C;
14833 else if (INTEL_GEN(dev_priv) >= 4)
14834 return i9xx_plane == PLANE_A || i9xx_plane == PLANE_B;
14835 else
14836 return i9xx_plane == PLANE_A;
14837 }
14838
14839 static struct intel_plane *
14840 intel_primary_plane_create(struct drm_i915_private *dev_priv, enum pipe pipe)
14841 {
14842 struct intel_plane *plane;
14843 const struct drm_plane_funcs *plane_funcs;
14844 unsigned int supported_rotations;
14845 unsigned int possible_crtcs;
14846 const u64 *modifiers;
14847 const u32 *formats;
14848 int num_formats;
14849 int ret;
14850
14851 if (INTEL_GEN(dev_priv) >= 9)
14852 return skl_universal_plane_create(dev_priv, pipe,
14853 PLANE_PRIMARY);
14854
14855 plane = intel_plane_alloc();
14856 if (IS_ERR(plane))
14857 return plane;
14858
14859 plane->pipe = pipe;
14860
14861
14862
14863
14864 if (HAS_FBC(dev_priv) && INTEL_GEN(dev_priv) < 4)
14865 plane->i9xx_plane = (enum i9xx_plane_id) !pipe;
14866 else
14867 plane->i9xx_plane = (enum i9xx_plane_id) pipe;
14868 plane->id = PLANE_PRIMARY;
14869 plane->frontbuffer_bit = INTEL_FRONTBUFFER(pipe, plane->id);
14870
14871 plane->has_fbc = i9xx_plane_has_fbc(dev_priv, plane->i9xx_plane);
14872 if (plane->has_fbc) {
14873 struct intel_fbc *fbc = &dev_priv->fbc;
14874
14875 fbc->possible_framebuffer_bits |= plane->frontbuffer_bit;
14876 }
14877
14878 if (INTEL_GEN(dev_priv) >= 4) {
14879 formats = i965_primary_formats;
14880 num_formats = ARRAY_SIZE(i965_primary_formats);
14881 modifiers = i9xx_format_modifiers;
14882
14883 plane->max_stride = i9xx_plane_max_stride;
14884 plane->update_plane = i9xx_update_plane;
14885 plane->disable_plane = i9xx_disable_plane;
14886 plane->get_hw_state = i9xx_plane_get_hw_state;
14887 plane->check_plane = i9xx_plane_check;
14888
14889 plane_funcs = &i965_plane_funcs;
14890 } else {
14891 formats = i8xx_primary_formats;
14892 num_formats = ARRAY_SIZE(i8xx_primary_formats);
14893 modifiers = i9xx_format_modifiers;
14894
14895 plane->max_stride = i9xx_plane_max_stride;
14896 plane->update_plane = i9xx_update_plane;
14897 plane->disable_plane = i9xx_disable_plane;
14898 plane->get_hw_state = i9xx_plane_get_hw_state;
14899 plane->check_plane = i9xx_plane_check;
14900
14901 plane_funcs = &i8xx_plane_funcs;
14902 }
14903
14904 possible_crtcs = BIT(pipe);
14905
14906 if (INTEL_GEN(dev_priv) >= 5 || IS_G4X(dev_priv))
14907 ret = drm_universal_plane_init(&dev_priv->drm, &plane->base,
14908 possible_crtcs, plane_funcs,
14909 formats, num_formats, modifiers,
14910 DRM_PLANE_TYPE_PRIMARY,
14911 "primary %c", pipe_name(pipe));
14912 else
14913 ret = drm_universal_plane_init(&dev_priv->drm, &plane->base,
14914 possible_crtcs, plane_funcs,
14915 formats, num_formats, modifiers,
14916 DRM_PLANE_TYPE_PRIMARY,
14917 "plane %c",
14918 plane_name(plane->i9xx_plane));
14919 if (ret)
14920 goto fail;
14921
14922 if (IS_CHERRYVIEW(dev_priv) && pipe == PIPE_B) {
14923 supported_rotations =
14924 DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_180 |
14925 DRM_MODE_REFLECT_X;
14926 } else if (INTEL_GEN(dev_priv) >= 4) {
14927 supported_rotations =
14928 DRM_MODE_ROTATE_0 | DRM_MODE_ROTATE_180;
14929 } else {
14930 supported_rotations = DRM_MODE_ROTATE_0;
14931 }
14932
14933 if (INTEL_GEN(dev_priv) >= 4)
14934 drm_plane_create_rotation_property(&plane->base,
14935 DRM_MODE_ROTATE_0,
14936 supported_rotations);
14937
14938 drm_plane_helper_add(&plane->base, &intel_plane_helper_funcs);
14939
14940 return plane;
14941
14942 fail:
14943 intel_plane_free(plane);
14944
14945 return ERR_PTR(ret);
14946 }
14947
14948 static struct intel_plane *
14949 intel_cursor_plane_create(struct drm_i915_private *dev_priv,
14950 enum pipe pipe)
14951 {
14952 unsigned int possible_crtcs;
14953 struct intel_plane *cursor;
14954 int ret;
14955
14956 cursor = intel_plane_alloc();
14957 if (IS_ERR(cursor))
14958 return cursor;
14959
14960 cursor->pipe = pipe;
14961 cursor->i9xx_plane = (enum i9xx_plane_id) pipe;
14962 cursor->id = PLANE_CURSOR;
14963 cursor->frontbuffer_bit = INTEL_FRONTBUFFER(pipe, cursor->id);
14964
14965 if (IS_I845G(dev_priv) || IS_I865G(dev_priv)) {
14966 cursor->max_stride = i845_cursor_max_stride;
14967 cursor->update_plane = i845_update_cursor;
14968 cursor->disable_plane = i845_disable_cursor;
14969 cursor->get_hw_state = i845_cursor_get_hw_state;
14970 cursor->check_plane = i845_check_cursor;
14971 } else {
14972 cursor->max_stride = i9xx_cursor_max_stride;
14973 cursor->update_plane = i9xx_update_cursor;
14974 cursor->disable_plane = i9xx_disable_cursor;
14975 cursor->get_hw_state = i9xx_cursor_get_hw_state;
14976 cursor->check_plane = i9xx_check_cursor;
14977 }
14978
14979 cursor->cursor.base = ~0;
14980 cursor->cursor.cntl = ~0;
14981
14982 if (IS_I845G(dev_priv) || IS_I865G(dev_priv) || HAS_CUR_FBC(dev_priv))
14983 cursor->cursor.size = ~0;
14984
14985 possible_crtcs = BIT(pipe);
14986
14987 ret = drm_universal_plane_init(&dev_priv->drm, &cursor->base,
14988 possible_crtcs, &intel_cursor_plane_funcs,
14989 intel_cursor_formats,
14990 ARRAY_SIZE(intel_cursor_formats),
14991 cursor_format_modifiers,
14992 DRM_PLANE_TYPE_CURSOR,
14993 "cursor %c", pipe_name(pipe));
14994 if (ret)
14995 goto fail;
14996
14997 if (INTEL_GEN(dev_priv) >= 4)
14998 drm_plane_create_rotation_property(&cursor->base,
14999 DRM_MODE_ROTATE_0,
15000 DRM_MODE_ROTATE_0 |
15001 DRM_MODE_ROTATE_180);
15002
15003 drm_plane_helper_add(&cursor->base, &intel_plane_helper_funcs);
15004
15005 return cursor;
15006
15007 fail:
15008 intel_plane_free(cursor);
15009
15010 return ERR_PTR(ret);
15011 }
15012
15013 static void intel_crtc_init_scalers(struct intel_crtc *crtc,
15014 struct intel_crtc_state *crtc_state)
15015 {
15016 struct intel_crtc_scaler_state *scaler_state =
15017 &crtc_state->scaler_state;
15018 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
15019 int i;
15020
15021 crtc->num_scalers = RUNTIME_INFO(dev_priv)->num_scalers[crtc->pipe];
15022 if (!crtc->num_scalers)
15023 return;
15024
15025 for (i = 0; i < crtc->num_scalers; i++) {
15026 struct intel_scaler *scaler = &scaler_state->scalers[i];
15027
15028 scaler->in_use = 0;
15029 scaler->mode = 0;
15030 }
15031
15032 scaler_state->scaler_id = -1;
15033 }
15034
/* drm_crtc_funcs members common to every platform variant below. */
#define INTEL_CRTC_FUNCS \
	.gamma_set = drm_atomic_helper_legacy_gamma_set, \
	.set_config = drm_atomic_helper_set_config, \
	.destroy = intel_crtc_destroy, \
	.page_flip = drm_atomic_helper_page_flip, \
	.atomic_duplicate_state = intel_crtc_duplicate_state, \
	.atomic_destroy_state = intel_crtc_destroy_state, \
	.set_crc_source = intel_crtc_set_crc_source, \
	.verify_crc_source = intel_crtc_verify_crc_source, \
	.get_crc_sources = intel_crtc_get_crc_sources

/* The variants below differ only in vblank counter/enable/disable hooks. */
static const struct drm_crtc_funcs bdw_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = bdw_enable_vblank,
	.disable_vblank = bdw_disable_vblank,
};

static const struct drm_crtc_funcs ilk_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = ilk_enable_vblank,
	.disable_vblank = ilk_disable_vblank,
};

static const struct drm_crtc_funcs g4x_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = g4x_get_vblank_counter,
	.enable_vblank = i965_enable_vblank,
	.disable_vblank = i965_disable_vblank,
};

static const struct drm_crtc_funcs i965_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i965_enable_vblank,
	.disable_vblank = i965_disable_vblank,
};

static const struct drm_crtc_funcs i945gm_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i945gm_enable_vblank,
	.disable_vblank = i945gm_disable_vblank,
};

static const struct drm_crtc_funcs i915_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	.get_vblank_counter = i915_get_vblank_counter,
	.enable_vblank = i8xx_enable_vblank,
	.disable_vblank = i8xx_disable_vblank,
};

static const struct drm_crtc_funcs i8xx_crtc_funcs = {
	INTEL_CRTC_FUNCS,

	/* no hw vblank counter */
	.enable_vblank = i8xx_enable_vblank,
	.disable_vblank = i8xx_disable_vblank,
};
15101
/*
 * Allocate and register the crtc for @pipe, together with its primary,
 * sprite and cursor planes, and wire up the platform-specific
 * drm_crtc_funcs variant. Returns 0 or a negative errno.
 */
static int intel_crtc_init(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	const struct drm_crtc_funcs *funcs;
	struct intel_crtc *intel_crtc;
	struct intel_crtc_state *crtc_state = NULL;
	struct intel_plane *primary = NULL;
	struct intel_plane *cursor = NULL;
	int sprite, ret;

	intel_crtc = kzalloc(sizeof(*intel_crtc), GFP_KERNEL);
	if (!intel_crtc)
		return -ENOMEM;

	crtc_state = kzalloc(sizeof(*crtc_state), GFP_KERNEL);
	if (!crtc_state) {
		ret = -ENOMEM;
		goto fail;
	}
	__drm_atomic_helper_crtc_reset(&intel_crtc->base, &crtc_state->base);
	intel_crtc->config = crtc_state;

	primary = intel_primary_plane_create(dev_priv, pipe);
	if (IS_ERR(primary)) {
		ret = PTR_ERR(primary);
		goto fail;
	}
	intel_crtc->plane_ids_mask |= BIT(primary->id);

	for_each_sprite(dev_priv, pipe, sprite) {
		struct intel_plane *plane;

		plane = intel_sprite_plane_create(dev_priv, pipe, sprite);
		if (IS_ERR(plane)) {
			ret = PTR_ERR(plane);
			goto fail;
		}
		intel_crtc->plane_ids_mask |= BIT(plane->id);
	}

	cursor = intel_cursor_plane_create(dev_priv, pipe);
	if (IS_ERR(cursor)) {
		ret = PTR_ERR(cursor);
		goto fail;
	}
	intel_crtc->plane_ids_mask |= BIT(cursor->id);

	/* Pick the crtc funcs variant matching the vblank hardware. */
	if (HAS_GMCH(dev_priv)) {
		if (IS_CHERRYVIEW(dev_priv) ||
		    IS_VALLEYVIEW(dev_priv) || IS_G4X(dev_priv))
			funcs = &g4x_crtc_funcs;
		else if (IS_GEN(dev_priv, 4))
			funcs = &i965_crtc_funcs;
		else if (IS_I945GM(dev_priv))
			funcs = &i945gm_crtc_funcs;
		else if (IS_GEN(dev_priv, 3))
			funcs = &i915_crtc_funcs;
		else
			funcs = &i8xx_crtc_funcs;
	} else {
		if (INTEL_GEN(dev_priv) >= 8)
			funcs = &bdw_crtc_funcs;
		else
			funcs = &ilk_crtc_funcs;
	}

	ret = drm_crtc_init_with_planes(&dev_priv->drm, &intel_crtc->base,
					&primary->base, &cursor->base,
					funcs, "pipe %c", pipe_name(pipe));
	if (ret)
		goto fail;

	intel_crtc->pipe = pipe;

	/* initialize shared scalers */
	intel_crtc_init_scalers(intel_crtc, crtc_state);

	BUG_ON(pipe >= ARRAY_SIZE(dev_priv->pipe_to_crtc_mapping) ||
	       dev_priv->pipe_to_crtc_mapping[pipe] != NULL);
	dev_priv->pipe_to_crtc_mapping[pipe] = intel_crtc;

	/* pre-skl also tracks the (possibly swapped) hw plane -> crtc map */
	if (INTEL_GEN(dev_priv) < 9) {
		enum i9xx_plane_id i9xx_plane = primary->i9xx_plane;

		BUG_ON(i9xx_plane >= ARRAY_SIZE(dev_priv->plane_to_crtc_mapping) ||
		       dev_priv->plane_to_crtc_mapping[i9xx_plane] != NULL);
		dev_priv->plane_to_crtc_mapping[i9xx_plane] = intel_crtc;
	}

	drm_crtc_helper_add(&intel_crtc->base, &intel_helper_funcs);

	intel_color_init(intel_crtc);

	WARN_ON(drm_crtc_index(&intel_crtc->base) != intel_crtc->pipe);

	return 0;

fail:
	/*
	 * NOTE(review): planes created above are not explicitly destroyed
	 * here; presumably drm_mode_config_cleanup() frees any crtcs/planes
	 * already registered -- confirm.
	 */
	kfree(crtc_state);
	kfree(intel_crtc);

	return ret;
}
15208
15209 int intel_get_pipe_from_crtc_id_ioctl(struct drm_device *dev, void *data,
15210 struct drm_file *file)
15211 {
15212 struct drm_i915_get_pipe_from_crtc_id *pipe_from_crtc_id = data;
15213 struct drm_crtc *drmmode_crtc;
15214 struct intel_crtc *crtc;
15215
15216 drmmode_crtc = drm_crtc_find(dev, file, pipe_from_crtc_id->crtc_id);
15217 if (!drmmode_crtc)
15218 return -ENOENT;
15219
15220 crtc = to_intel_crtc(drmmode_crtc);
15221 pipe_from_crtc_id->pipe = crtc->pipe;
15222
15223 return 0;
15224 }
15225
15226 static int intel_encoder_clones(struct intel_encoder *encoder)
15227 {
15228 struct drm_device *dev = encoder->base.dev;
15229 struct intel_encoder *source_encoder;
15230 int index_mask = 0;
15231 int entry = 0;
15232
15233 for_each_intel_encoder(dev, source_encoder) {
15234 if (encoders_cloneable(encoder, source_encoder))
15235 index_mask |= (1 << entry);
15236
15237 entry++;
15238 }
15239
15240 return index_mask;
15241 }
15242
15243 static bool ilk_has_edp_a(struct drm_i915_private *dev_priv)
15244 {
15245 if (!IS_MOBILE(dev_priv))
15246 return false;
15247
15248 if ((I915_READ(DP_A) & DP_DETECTED) == 0)
15249 return false;
15250
15251 if (IS_GEN(dev_priv, 5) && (I915_READ(FUSE_STRAP) & ILK_eDP_A_DISABLE))
15252 return false;
15253
15254 return true;
15255 }
15256
/*
 * Decide whether a DDI-based CRT (VGA) output exists: ruled out on gen9+,
 * on ULT parts, when strapped off, when DDI A claims 4 lanes (which would
 * consume DDI E), or when the VBT says there is no internal CRT.
 */
static bool intel_ddi_crt_present(struct drm_i915_private *dev_priv)
{
	if (INTEL_GEN(dev_priv) >= 9)
		return false;

	if (IS_HSW_ULT(dev_priv) || IS_BDW_ULT(dev_priv))
		return false;

	if (HAS_PCH_LPT_H(dev_priv) &&
	    I915_READ(SFUSE_STRAP) & SFUSE_STRAP_CRT_DISABLED)
		return false;

	/* DDI E can't be used if DDI A requires 4 lanes */
	if (I915_READ(DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
		return false;

	if (!dev_priv->vbt.int_crt_support)
		return false;

	return true;
}
15278
15279 void intel_pps_unlock_regs_wa(struct drm_i915_private *dev_priv)
15280 {
15281 int pps_num;
15282 int pps_idx;
15283
15284 if (HAS_DDI(dev_priv))
15285 return;
15286
15287
15288
15289
15290 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
15291 pps_num = 2;
15292 else
15293 pps_num = 1;
15294
15295 for (pps_idx = 0; pps_idx < pps_num; pps_idx++) {
15296 u32 val = I915_READ(PP_CONTROL(pps_idx));
15297
15298 val = (val & ~PANEL_UNLOCK_MASK) | PANEL_UNLOCK_REGS;
15299 I915_WRITE(PP_CONTROL(pps_idx), val);
15300 }
15301 }
15302
/*
 * Select the panel power sequencer MMIO block for this platform and apply
 * the register-unlock workaround.
 */
static void intel_pps_init(struct drm_i915_private *dev_priv)
{
	if (HAS_PCH_SPLIT(dev_priv) || IS_GEN9_LP(dev_priv))
		dev_priv->pps_mmio_base = PCH_PPS_BASE;
	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		dev_priv->pps_mmio_base = VLV_PPS_BASE;
	else
		dev_priv->pps_mmio_base = PPS_BASE;

	intel_pps_unlock_regs_wa(dev_priv);
}
15314
/*
 * Probe and register every display output (DDI/DP/HDMI/SDVO/LVDS/CRT/DSI/
 * DVO/TV) for the running platform, then fix up the possible_crtcs and
 * possible_clones masks of all encoders.
 */
static void intel_setup_outputs(struct drm_i915_private *dev_priv)
{
	struct intel_encoder *encoder;
	bool dpd_is_edp = false;

	intel_pps_init(dev_priv);

	if (!HAS_DISPLAY(dev_priv))
		return;

	if (INTEL_GEN(dev_priv) >= 12) {
		/* NOTE(review): only ports A/B initialized here -- presumably
		 * the remaining (TC) ports are a TODO on this kernel. */
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		icl_dsi_init(dev_priv);
	} else if (IS_ELKHARTLAKE(dev_priv)) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);
		intel_ddi_init(dev_priv, PORT_D);
		icl_dsi_init(dev_priv);
	} else if (IS_GEN(dev_priv, 11)) {
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);
		intel_ddi_init(dev_priv, PORT_D);
		intel_ddi_init(dev_priv, PORT_E);

		/*
		 * Port F is only present on some ICL SKUs; there is no strap
		 * bit for it, so the VBT is consulted instead.
		 */
		if (IS_ICL_WITH_PORT_F(dev_priv) &&
		    intel_bios_is_port_present(dev_priv, PORT_F))
			intel_ddi_init(dev_priv, PORT_F);

		icl_dsi_init(dev_priv);
	} else if (IS_GEN9_LP(dev_priv)) {
		/*
		 * All three DDIs are initialized unconditionally here; no
		 * strap-based detection is used for gen9 LP.
		 */
		intel_ddi_init(dev_priv, PORT_A);
		intel_ddi_init(dev_priv, PORT_B);
		intel_ddi_init(dev_priv, PORT_C);

		vlv_dsi_init(dev_priv);
	} else if (HAS_DDI(dev_priv)) {
		int found;

		if (intel_ddi_crt_present(dev_priv))
			intel_crt_init(dev_priv);

		/*
		 * DDI A is detected via the init-display strap in
		 * DDI_BUF_CTL; gen9 BC initializes it regardless.
		 */
		found = I915_READ(DDI_BUF_CTL(PORT_A)) & DDI_INIT_DISPLAY_DETECTED;

		if (found || IS_GEN9_BC(dev_priv))
			intel_ddi_init(dev_priv, PORT_A);

		/* Remaining DDIs are detected via the SFUSE straps. */
		found = I915_READ(SFUSE_STRAP);

		if (found & SFUSE_STRAP_DDIB_DETECTED)
			intel_ddi_init(dev_priv, PORT_B);
		if (found & SFUSE_STRAP_DDIC_DETECTED)
			intel_ddi_init(dev_priv, PORT_C);
		if (found & SFUSE_STRAP_DDID_DETECTED)
			intel_ddi_init(dev_priv, PORT_D);
		if (found & SFUSE_STRAP_DDIF_DETECTED)
			intel_ddi_init(dev_priv, PORT_F);

		/* Port E on gen9 BC has no strap; trust the VBT. */
		if (IS_GEN9_BC(dev_priv) &&
		    intel_bios_is_port_present(dev_priv, PORT_E))
			intel_ddi_init(dev_priv, PORT_E);

	} else if (HAS_PCH_SPLIT(dev_priv)) {
		int found;

		/*
		 * LVDS and CRT are always registered first on PCH split
		 * platforms; the DP/HDMI ports below are strap-detected.
		 */
		intel_lvds_init(dev_priv);
		intel_crt_init(dev_priv);

		dpd_is_edp = intel_dp_is_port_edp(dev_priv, PORT_D);

		if (ilk_has_edp_a(dev_priv))
			intel_dp_init(dev_priv, DP_A, PORT_A);

		if (I915_READ(PCH_HDMIB) & SDVO_DETECTED) {
			/* PCH SDVOB multiplexes with HDMIB */
			found = intel_sdvo_init(dev_priv, PCH_SDVOB, PORT_B);
			if (!found)
				intel_hdmi_init(dev_priv, PCH_HDMIB, PORT_B);
			if (!found && (I915_READ(PCH_DP_B) & DP_DETECTED))
				intel_dp_init(dev_priv, PCH_DP_B, PORT_B);
		}

		if (I915_READ(PCH_HDMIC) & SDVO_DETECTED)
			intel_hdmi_init(dev_priv, PCH_HDMIC, PORT_C);

		if (!dpd_is_edp && I915_READ(PCH_HDMID) & SDVO_DETECTED)
			intel_hdmi_init(dev_priv, PCH_HDMID, PORT_D);

		if (I915_READ(PCH_DP_C) & DP_DETECTED)
			intel_dp_init(dev_priv, PCH_DP_C, PORT_C);

		if (I915_READ(PCH_DP_D) & DP_DETECTED)
			intel_dp_init(dev_priv, PCH_DP_D, PORT_D);
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		bool has_edp, has_port;

		if (IS_VALLEYVIEW(dev_priv) && dev_priv->vbt.int_crt_support)
			intel_crt_init(dev_priv);

		/*
		 * Each port is probed via the DP/HDMI detect bits OR the VBT
		 * (has_port), since the straps are not fully reliable here.
		 * eDP takes precedence over HDMI on the shared port: HDMI is
		 * only registered when eDP init did not claim the port.
		 * NOTE(review): exact strap-reliability rationale inferred
		 * from the OR with the VBT lookup -- confirm.
		 */
		has_edp = intel_dp_is_port_edp(dev_priv, PORT_B);
		has_port = intel_bios_is_port_present(dev_priv, PORT_B);
		if (I915_READ(VLV_DP_B) & DP_DETECTED || has_port)
			has_edp &= intel_dp_init(dev_priv, VLV_DP_B, PORT_B);
		if ((I915_READ(VLV_HDMIB) & SDVO_DETECTED || has_port) && !has_edp)
			intel_hdmi_init(dev_priv, VLV_HDMIB, PORT_B);

		has_edp = intel_dp_is_port_edp(dev_priv, PORT_C);
		has_port = intel_bios_is_port_present(dev_priv, PORT_C);
		if (I915_READ(VLV_DP_C) & DP_DETECTED || has_port)
			has_edp &= intel_dp_init(dev_priv, VLV_DP_C, PORT_C);
		if ((I915_READ(VLV_HDMIC) & SDVO_DETECTED || has_port) && !has_edp)
			intel_hdmi_init(dev_priv, VLV_HDMIC, PORT_C);

		if (IS_CHERRYVIEW(dev_priv)) {
			/* Port D exists on CHV only; eDP is not handled here. */
			has_port = intel_bios_is_port_present(dev_priv, PORT_D);
			if (I915_READ(CHV_DP_D) & DP_DETECTED || has_port)
				intel_dp_init(dev_priv, CHV_DP_D, PORT_D);
			if (I915_READ(CHV_HDMID) & SDVO_DETECTED || has_port)
				intel_hdmi_init(dev_priv, CHV_HDMID, PORT_D);
		}

		vlv_dsi_init(dev_priv);
	} else if (IS_PINEVIEW(dev_priv)) {
		intel_lvds_init(dev_priv);
		intel_crt_init(dev_priv);
	} else if (IS_GEN_RANGE(dev_priv, 3, 4)) {
		bool found = false;

		if (IS_MOBILE(dev_priv))
			intel_lvds_init(dev_priv);

		intel_crt_init(dev_priv);

		if (I915_READ(GEN3_SDVOB) & SDVO_DETECTED) {
			DRM_DEBUG_KMS("probing SDVOB\n");
			found = intel_sdvo_init(dev_priv, GEN3_SDVOB, PORT_B);
			if (!found && IS_G4X(dev_priv)) {
				DRM_DEBUG_KMS("probing HDMI on SDVOB\n");
				intel_hdmi_init(dev_priv, GEN4_HDMIB, PORT_B);
			}

			if (!found && IS_G4X(dev_priv))
				intel_dp_init(dev_priv, DP_B, PORT_B);
		}

		/*
		 * SDVOC is gated on the SDVOB detect bit here; presumably
		 * SDVOC lacks its own detect register on pre-G4X -- confirm.
		 */
		if (I915_READ(GEN3_SDVOB) & SDVO_DETECTED) {
			DRM_DEBUG_KMS("probing SDVOC\n");
			found = intel_sdvo_init(dev_priv, GEN3_SDVOC, PORT_C);
		}

		if (!found && (I915_READ(GEN3_SDVOC) & SDVO_DETECTED)) {
			if (IS_G4X(dev_priv)) {
				DRM_DEBUG_KMS("probing HDMI on SDVOC\n");
				intel_hdmi_init(dev_priv, GEN4_HDMIC, PORT_C);
			}
			if (IS_G4X(dev_priv))
				intel_dp_init(dev_priv, DP_C, PORT_C);
		}

		if (IS_G4X(dev_priv) && (I915_READ(DP_D) & DP_DETECTED))
			intel_dp_init(dev_priv, DP_D, PORT_D);

		if (SUPPORTS_TV(dev_priv))
			intel_tv_init(dev_priv);
	} else if (IS_GEN(dev_priv, 2)) {
		if (IS_I85X(dev_priv))
			intel_lvds_init(dev_priv);

		intel_crt_init(dev_priv);
		intel_dvo_init(dev_priv);
	}

	intel_psr_init(dev_priv);

	/* Now that all encoders exist, fill in their crtc/clone masks. */
	for_each_intel_encoder(&dev_priv->drm, encoder) {
		encoder->base.possible_crtcs = encoder->crtc_mask;
		encoder->base.possible_clones =
			intel_encoder_clones(encoder);
	}

	intel_init_pch_refclk(dev_priv);

	drm_helper_move_panel_connectors_to_head(&dev_priv->drm);
}
15547
/*
 * Framebuffer .destroy hook: unregister the fb, drop the frontbuffer
 * reference taken at creation, and free the wrapper.
 */
static void intel_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
	struct intel_framebuffer *intel_fb = to_intel_framebuffer(fb);

	drm_framebuffer_cleanup(fb);
	intel_frontbuffer_put(intel_fb->frontbuffer);

	kfree(intel_fb);
}
15557
15558 static int intel_user_framebuffer_create_handle(struct drm_framebuffer *fb,
15559 struct drm_file *file,
15560 unsigned int *handle)
15561 {
15562 struct drm_i915_gem_object *obj = intel_fb_obj(fb);
15563
15564 if (obj->userptr.mm) {
15565 DRM_DEBUG("attempting to use a userptr for a framebuffer, denied\n");
15566 return -EINVAL;
15567 }
15568
15569 return drm_gem_handle_create(file, &obj->base, handle);
15570 }
15571
15572 static int intel_user_framebuffer_dirty(struct drm_framebuffer *fb,
15573 struct drm_file *file,
15574 unsigned flags, unsigned color,
15575 struct drm_clip_rect *clips,
15576 unsigned num_clips)
15577 {
15578 struct drm_i915_gem_object *obj = intel_fb_obj(fb);
15579
15580 i915_gem_object_flush_if_display(obj);
15581 intel_frontbuffer_flush(to_intel_frontbuffer(fb), ORIGIN_DIRTYFB);
15582
15583 return 0;
15584 }
15585
/* Framebuffer vfuncs for userspace-created framebuffers. */
static const struct drm_framebuffer_funcs intel_fb_funcs = {
	.destroy = intel_user_framebuffer_destroy,
	.create_handle = intel_user_framebuffer_create_handle,
	.dirty = intel_user_framebuffer_dirty,
};
15591
/*
 * Fill in @intel_fb for @obj as described by @mode_cmd, validating
 * tiling, pixel format, stride and offsets against the hardware's
 * capabilities before registering the fb with the DRM core.
 *
 * Returns 0 on success or a negative error code; on failure the
 * frontbuffer reference taken here is dropped again.
 */
static int intel_framebuffer_init(struct intel_framebuffer *intel_fb,
				  struct drm_i915_gem_object *obj,
				  struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct drm_i915_private *dev_priv = to_i915(obj->base.dev);
	struct drm_framebuffer *fb = &intel_fb->base;
	u32 max_stride;
	unsigned int tiling, stride;
	int ret = -EINVAL;
	int i;

	intel_fb->frontbuffer = intel_frontbuffer_get(obj);
	if (!intel_fb->frontbuffer)
		return -ENOMEM;

	/* Snapshot the object's tiling parameters under the object lock. */
	i915_gem_object_lock(obj);
	tiling = i915_gem_object_get_tiling(obj);
	stride = i915_gem_object_get_stride(obj);
	i915_gem_object_unlock(obj);

	if (mode_cmd->flags & DRM_MODE_FB_MODIFIERS) {
		/*
		 * If the object is fenced/tiled, the explicit fb modifier
		 * must agree with the object's tiling mode.
		 */
		if (tiling != I915_TILING_NONE &&
		    tiling != intel_fb_modifier_to_tiling(mode_cmd->modifier[0])) {
			DRM_DEBUG_KMS("tiling_mode doesn't match fb modifier\n");
			goto err;
		}
	} else {
		/* No explicit modifiers: derive one from the tiling mode. */
		if (tiling == I915_TILING_X) {
			mode_cmd->modifier[0] = I915_FORMAT_MOD_X_TILED;
		} else if (tiling == I915_TILING_Y) {
			DRM_DEBUG_KMS("No Y tiling for legacy addfb\n");
			goto err;
		}
	}

	if (!drm_any_plane_has_format(&dev_priv->drm,
				      mode_cmd->pixel_format,
				      mode_cmd->modifier[0])) {
		struct drm_format_name_buf format_name;

		DRM_DEBUG_KMS("unsupported pixel format %s / modifier 0x%llx\n",
			      drm_get_format_name(mode_cmd->pixel_format,
						  &format_name),
			      mode_cmd->modifier[0]);
		goto err;
	}

	/*
	 * On gen2/3 the fb tiling must match the object tiling exactly
	 * (not just "compatible" as above).
	 */
	if (INTEL_GEN(dev_priv) < 4 &&
	    tiling != intel_fb_modifier_to_tiling(mode_cmd->modifier[0])) {
		DRM_DEBUG_KMS("tiling_mode must match fb modifier exactly on gen2/3\n");
		goto err;
	}

	max_stride = intel_fb_max_stride(dev_priv, mode_cmd->pixel_format,
					 mode_cmd->modifier[0]);
	if (mode_cmd->pitches[0] > max_stride) {
		DRM_DEBUG_KMS("%s pitch (%u) must be at most %d\n",
			      mode_cmd->modifier[0] != DRM_FORMAT_MOD_LINEAR ?
			      "tiled" : "linear",
			      mode_cmd->pitches[0], max_stride);
		goto err;
	}

	/*
	 * A tiled object's fence stride dictates the fb pitch; a mismatch
	 * would make the fence and the display disagree about layout.
	 */
	if (tiling != I915_TILING_NONE && mode_cmd->pitches[0] != stride) {
		DRM_DEBUG_KMS("pitch (%d) must match tiling stride (%d)\n",
			      mode_cmd->pitches[0], stride);
		goto err;
	}

	/* Non-zero plane 0 offsets are not supported. */
	if (mode_cmd->offsets[0] != 0)
		goto err;

	drm_helper_mode_fill_fb_struct(&dev_priv->drm, fb, mode_cmd);

	for (i = 0; i < fb->format->num_planes; i++) {
		u32 stride_alignment;

		/* All color planes must live in the same GEM object. */
		if (mode_cmd->handles[i] != mode_cmd->handles[0]) {
			DRM_DEBUG_KMS("bad plane %d handle\n", i);
			goto err;
		}

		stride_alignment = intel_fb_stride_alignment(fb, i);

		/*
		 * Gen9 CCS main surfaces wider than 3840 pixels need a 4x
		 * stricter stride alignment.
		 * NOTE(review): presumably a display workaround for wide
		 * compressed surfaces -- confirm against bspec.
		 */
		if (IS_GEN(dev_priv, 9) && i == 0 && fb->width > 3840 &&
		    is_ccs_modifier(fb->modifier))
			stride_alignment *= 4;

		if (fb->pitches[i] & (stride_alignment - 1)) {
			DRM_DEBUG_KMS("plane %d pitch (%d) must be at least %u byte aligned\n",
				      i, fb->pitches[i], stride_alignment);
			goto err;
		}

		fb->obj[i] = &obj->base;
	}

	ret = intel_fill_fb_info(dev_priv, fb);
	if (ret)
		goto err;

	ret = drm_framebuffer_init(&dev_priv->drm, fb, &intel_fb_funcs);
	if (ret) {
		DRM_ERROR("framebuffer init failed %d\n", ret);
		goto err;
	}

	return 0;

err:
	intel_frontbuffer_put(intel_fb->frontbuffer);
	return ret;
}
15727
15728 static struct drm_framebuffer *
15729 intel_user_framebuffer_create(struct drm_device *dev,
15730 struct drm_file *filp,
15731 const struct drm_mode_fb_cmd2 *user_mode_cmd)
15732 {
15733 struct drm_framebuffer *fb;
15734 struct drm_i915_gem_object *obj;
15735 struct drm_mode_fb_cmd2 mode_cmd = *user_mode_cmd;
15736
15737 obj = i915_gem_object_lookup(filp, mode_cmd.handles[0]);
15738 if (!obj)
15739 return ERR_PTR(-ENOENT);
15740
15741 fb = intel_framebuffer_create(obj, &mode_cmd);
15742 i915_gem_object_put(obj);
15743
15744 return fb;
15745 }
15746
15747 static void intel_atomic_state_free(struct drm_atomic_state *state)
15748 {
15749 struct intel_atomic_state *intel_state = to_intel_atomic_state(state);
15750
15751 drm_atomic_state_default_release(state);
15752
15753 i915_sw_fence_fini(&intel_state->commit_ready);
15754
15755 kfree(state);
15756 }
15757
15758 static enum drm_mode_status
15759 intel_mode_valid(struct drm_device *dev,
15760 const struct drm_display_mode *mode)
15761 {
15762 struct drm_i915_private *dev_priv = to_i915(dev);
15763 int hdisplay_max, htotal_max;
15764 int vdisplay_max, vtotal_max;
15765
15766
15767
15768
15769
15770
15771
15772
15773
15774
15775
15776
15777
15778
15779 if (mode->vscan > 1)
15780 return MODE_NO_VSCAN;
15781
15782 if (mode->flags & DRM_MODE_FLAG_HSKEW)
15783 return MODE_H_ILLEGAL;
15784
15785 if (mode->flags & (DRM_MODE_FLAG_CSYNC |
15786 DRM_MODE_FLAG_NCSYNC |
15787 DRM_MODE_FLAG_PCSYNC))
15788 return MODE_HSYNC;
15789
15790 if (mode->flags & (DRM_MODE_FLAG_BCAST |
15791 DRM_MODE_FLAG_PIXMUX |
15792 DRM_MODE_FLAG_CLKDIV2))
15793 return MODE_BAD;
15794
15795 if (INTEL_GEN(dev_priv) >= 9 ||
15796 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) {
15797 hdisplay_max = 8192;
15798 vdisplay_max = 4096;
15799 htotal_max = 8192;
15800 vtotal_max = 8192;
15801 } else if (INTEL_GEN(dev_priv) >= 3) {
15802 hdisplay_max = 4096;
15803 vdisplay_max = 4096;
15804 htotal_max = 8192;
15805 vtotal_max = 8192;
15806 } else {
15807 hdisplay_max = 2048;
15808 vdisplay_max = 2048;
15809 htotal_max = 4096;
15810 vtotal_max = 4096;
15811 }
15812
15813 if (mode->hdisplay > hdisplay_max ||
15814 mode->hsync_start > htotal_max ||
15815 mode->hsync_end > htotal_max ||
15816 mode->htotal > htotal_max)
15817 return MODE_H_ILLEGAL;
15818
15819 if (mode->vdisplay > vdisplay_max ||
15820 mode->vsync_start > vtotal_max ||
15821 mode->vsync_end > vtotal_max ||
15822 mode->vtotal > vtotal_max)
15823 return MODE_V_ILLEGAL;
15824
15825 return MODE_OK;
15826 }
15827
/* mode_config vfuncs: i915 uses the atomic modeset paths throughout. */
static const struct drm_mode_config_funcs intel_mode_funcs = {
	.fb_create = intel_user_framebuffer_create,
	.get_format_info = intel_get_format_info,
	.output_poll_changed = intel_fbdev_output_poll_changed,
	.mode_valid = intel_mode_valid,
	.atomic_check = intel_atomic_check,
	.atomic_commit = intel_atomic_commit,
	.atomic_state_alloc = intel_atomic_state_alloc,
	.atomic_state_clear = intel_atomic_state_clear,
	.atomic_state_free = intel_atomic_state_free,
};
15839
15840
15841
15842
15843
/*
 * Set up the per-platform display vfunc tables: pipe config readout,
 * initial (BIOS) plane config takeover, clock computation, crtc
 * enable/disable, FDI link training and the crtc update path.
 * The if/else chain is ordered most-specific-first (gen9+ before DDI,
 * DDI before PCH-split, etc.) and must stay that way.
 */
void intel_init_display_hooks(struct drm_i915_private *dev_priv)
{
	intel_init_cdclk_hooks(dev_priv);

	if (INTEL_GEN(dev_priv) >= 9) {
		dev_priv->display.get_pipe_config = haswell_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			skylake_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock =
			haswell_crtc_compute_clock;
		dev_priv->display.crtc_enable = haswell_crtc_enable;
		dev_priv->display.crtc_disable = haswell_crtc_disable;
	} else if (HAS_DDI(dev_priv)) {
		dev_priv->display.get_pipe_config = haswell_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock =
			haswell_crtc_compute_clock;
		dev_priv->display.crtc_enable = haswell_crtc_enable;
		dev_priv->display.crtc_disable = haswell_crtc_disable;
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		dev_priv->display.get_pipe_config = ironlake_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock =
			ironlake_crtc_compute_clock;
		dev_priv->display.crtc_enable = ironlake_crtc_enable;
		dev_priv->display.crtc_disable = ironlake_crtc_disable;
	} else if (IS_CHERRYVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = chv_crtc_compute_clock;
		dev_priv->display.crtc_enable = valleyview_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = vlv_crtc_compute_clock;
		dev_priv->display.crtc_enable = valleyview_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_G4X(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = g4x_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (IS_PINEVIEW(dev_priv)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = pnv_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else if (!IS_GEN(dev_priv, 2)) {
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = i9xx_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	} else {
		/* gen2 */
		dev_priv->display.get_pipe_config = i9xx_get_pipe_config;
		dev_priv->display.get_initial_plane_config =
			i9xx_get_initial_plane_config;
		dev_priv->display.crtc_compute_clock = i8xx_crtc_compute_clock;
		dev_priv->display.crtc_enable = i9xx_crtc_enable;
		dev_priv->display.crtc_disable = i9xx_crtc_disable;
	}

	/* FDI link training, only on platforms that have FDI (ILK-BDW). */
	if (IS_GEN(dev_priv, 5)) {
		dev_priv->display.fdi_link_train = ironlake_fdi_link_train;
	} else if (IS_GEN(dev_priv, 6)) {
		dev_priv->display.fdi_link_train = gen6_fdi_link_train;
	} else if (IS_IVYBRIDGE(dev_priv)) {
		/* FIXME: detect B0+ stepping and use auto training */
		dev_priv->display.fdi_link_train = ivb_manual_fdi_link_train;
	} else if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv)) {
		dev_priv->display.fdi_link_train = hsw_fdi_link_train;
	}

	if (INTEL_GEN(dev_priv) >= 9)
		dev_priv->display.update_crtcs = skl_update_crtcs;
	else
		dev_priv->display.update_crtcs = intel_update_crtcs;
}
15932
15933 static i915_reg_t i915_vgacntrl_reg(struct drm_i915_private *dev_priv)
15934 {
15935 if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
15936 return VLV_VGACNTRL;
15937 else if (INTEL_GEN(dev_priv) >= 5)
15938 return CPU_VGACNTRL;
15939 else
15940 return VGACNTRL;
15941 }
15942
15943
/* Disable the legacy VGA plane, blanking the screen via SR01 first. */
static void i915_disable_vga(struct drm_i915_private *dev_priv)
{
	struct pci_dev *pdev = dev_priv->drm.pdev;
	u8 sr1;
	i915_reg_t vga_reg = i915_vgacntrl_reg(dev_priv);

	/*
	 * Set the "screen off" bit (bit 5) in sequencer register SR01
	 * through the legacy VGA I/O ports, held only for the RMW.
	 */
	vga_get_uninterruptible(pdev, VGA_RSRC_LEGACY_IO);
	outb(SR01, VGA_SR_INDEX);
	sr1 = inb(VGA_SR_DATA);
	outb(sr1 | 1<<5, VGA_SR_DATA);
	vga_put(pdev, VGA_RSRC_LEGACY_IO);
	/* NOTE(review): 300us settle delay -- presumably required before
	 * touching VGACNTRL; confirm against the VGA programming docs. */
	udelay(300);

	I915_WRITE(vga_reg, VGA_DISP_DISABLE);
	POSTING_READ(vga_reg);
}
15961
/* Read out the current cdclk state and seed the logical/actual copies. */
void intel_modeset_init_hw(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);

	intel_update_cdclk(dev_priv);
	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
	dev_priv->cdclk.logical = dev_priv->cdclk.actual = dev_priv->cdclk.hw;
}
15970
15971
15972
15973
15974
15975
15976
15977
15978
15979
15980
/*
 * Recalculate and program watermarks for the hardware state inherited
 * from the BIOS, which may not match what the driver would have chosen.
 * Only used on platforms with the two-step (optimize_watermarks) flow.
 */
static void sanitize_watermarks(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct drm_atomic_state *state;
	struct intel_atomic_state *intel_state;
	struct intel_crtc *crtc;
	struct intel_crtc_state *crtc_state;
	struct drm_modeset_acquire_ctx ctx;
	int ret;
	int i;

	/* Only platforms with a two-step watermark update need this. */
	if (!dev_priv->display.optimize_watermarks)
		return;

	/* Take all modeset locks, retrying on deadlock backoff. */
	drm_modeset_acquire_init(&ctx, 0);
retry:
	ret = drm_modeset_lock_all_ctx(dev, &ctx);
	if (ret == -EDEADLK) {
		drm_modeset_backoff(&ctx);
		goto retry;
	} else if (WARN_ON(ret)) {
		goto fail;
	}

	/* Duplicate the current state so watermarks can be recomputed. */
	state = drm_atomic_helper_duplicate_state(dev, &ctx);
	if (WARN_ON(IS_ERR(state)))
		goto fail;

	intel_state = to_intel_atomic_state(state);

	/*
	 * Skip the intermediate watermark step on non-GMCH platforms and
	 * go straight to the optimal values.
	 */
	if (!HAS_GMCH(dev_priv))
		intel_state->skip_intermediate_wm = true;

	ret = intel_atomic_check(dev, state);
	if (ret) {
		/*
		 * The inherited state itself failed validation; nothing
		 * sane we can do but warn and keep the BIOS-programmed
		 * watermarks.
		 */
		WARN(true, "Could not determine valid watermarks for inherited state\n");
		goto put_state;
	}

	/* Write the calculated optimal watermarks into the hardware. */
	for_each_new_intel_crtc_in_state(intel_state, crtc, crtc_state, i) {
		crtc_state->wm.need_postvbl_update = true;
		dev_priv->display.optimize_watermarks(intel_state, crtc_state);

		to_intel_crtc_state(crtc->base.state)->wm = crtc_state->wm;
	}

put_state:
	drm_atomic_state_put(state);
fail:
	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
}
16055
16056 static void intel_update_fdi_pll_freq(struct drm_i915_private *dev_priv)
16057 {
16058 if (IS_GEN(dev_priv, 5)) {
16059 u32 fdi_pll_clk =
16060 I915_READ(FDI_PLL_BIOS_0) & FDI_PLL_FB_CLOCK_MASK;
16061
16062 dev_priv->fdi_pll_freq = (fdi_pll_clk + 2) * 10000;
16063 } else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv)) {
16064 dev_priv->fdi_pll_freq = 270000;
16065 } else {
16066 return;
16067 }
16068
16069 DRM_DEBUG_DRIVER("FDI PLL freq=%d\n", dev_priv->fdi_pll_freq);
16070 }
16071
/*
 * Commit the state inherited from the BIOS back to the hardware (forcing
 * a color management update on active crtcs), so that later atomic
 * updates start from state the driver itself has programmed.
 * Returns 0 or a negative error code.
 */
static int intel_initial_commit(struct drm_device *dev)
{
	struct drm_atomic_state *state = NULL;
	struct drm_modeset_acquire_ctx ctx;
	struct drm_crtc *crtc;
	struct drm_crtc_state *crtc_state;
	int ret = 0;

	state = drm_atomic_state_alloc(dev);
	if (!state)
		return -ENOMEM;

	drm_modeset_acquire_init(&ctx, 0);

retry:
	state->acquire_ctx = &ctx;

	drm_for_each_crtc(crtc, dev) {
		crtc_state = drm_atomic_get_crtc_state(state, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			goto out;
		}

		if (crtc_state->active) {
			ret = drm_atomic_add_affected_planes(state, crtc);
			if (ret)
				goto out;

			/*
			 * Force a full color management reprogram on
			 * active pipes; the hardware LUT contents left by
			 * the BIOS are unknown at this point.
			 */
			crtc_state->color_mgmt_changed = true;
		}
	}

	ret = drm_atomic_commit(state);

out:
	/* On deadlock, drop contended locks and rebuild the state. */
	if (ret == -EDEADLK) {
		drm_atomic_state_clear(state);
		drm_modeset_backoff(&ctx);
		goto retry;
	}

	drm_atomic_state_put(state);

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);

	return ret;
}
16127
/*
 * One-time modeset initialization: set up mode_config limits and vfuncs,
 * create crtcs, read out and sanitize the BIOS-programmed hardware
 * state, register outputs and perform the initial commit.
 * Returns 0 on success or a negative error code.
 */
int intel_modeset_init(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe;
	struct intel_crtc *crtc;
	int ret;

	/* NOTE(review): allocation result is not checked; a NULL wq would
	 * only surface on first queue_work -- confirm intended. */
	dev_priv->modeset_wq = alloc_ordered_workqueue("i915_modeset", 0);

	drm_mode_config_init(dev);

	ret = intel_bw_init(dev_priv);
	if (ret)
		return ret;

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	dev->mode_config.allow_fb_modifiers = true;

	dev->mode_config.funcs = &intel_mode_funcs;

	init_llist_head(&dev_priv->atomic_helper.free_list);
	INIT_WORK(&dev_priv->atomic_helper.free_work,
		  intel_atomic_helper_free_state_worker);

	intel_init_quirks(dev_priv);

	intel_fbc_init(dev_priv);

	intel_init_pm(dev_priv);

	/*
	 * If the BIOS' SSC setting disagrees with the VBT, trust what the
	 * BIOS actually programmed: flipping SSC now would disturb the
	 * panel the BIOS already lit up.
	 */
	if (HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv)) {
		bool bios_lvds_use_ssc = !!(I915_READ(PCH_DREF_CONTROL) &
					    DREF_SSC1_ENABLE);

		if (dev_priv->vbt.lvds_use_ssc != bios_lvds_use_ssc) {
			DRM_DEBUG_KMS("SSC %sabled by BIOS, overriding VBT which says %sabled\n",
				      bios_lvds_use_ssc ? "en" : "dis",
				      dev_priv->vbt.lvds_use_ssc ? "en" : "dis");
			dev_priv->vbt.lvds_use_ssc = bios_lvds_use_ssc;
		}
	}

	/* Maximum framebuffer dimensions, by platform generation. */
	if (INTEL_GEN(dev_priv) >= 7) {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	} else if (INTEL_GEN(dev_priv) >= 4) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else if (IS_GEN(dev_priv, 3)) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	}

	/* Cursor size limits, by platform. */
	if (IS_I845G(dev_priv) || IS_I865G(dev_priv)) {
		dev->mode_config.cursor_width = IS_I845G(dev_priv) ? 64 : 512;
		dev->mode_config.cursor_height = 1023;
	} else if (IS_GEN(dev_priv, 2)) {
		dev->mode_config.cursor_width = 64;
		dev->mode_config.cursor_height = 64;
	} else {
		dev->mode_config.cursor_width = 256;
		dev->mode_config.cursor_height = 256;
	}

	DRM_DEBUG_KMS("%d display pipe%s available.\n",
		      INTEL_INFO(dev_priv)->num_pipes,
		      INTEL_INFO(dev_priv)->num_pipes > 1 ? "s" : "");

	for_each_pipe(dev_priv, pipe) {
		ret = intel_crtc_init(dev_priv, pipe);
		if (ret) {
			drm_mode_config_cleanup(dev);
			return ret;
		}
	}

	intel_shared_dpll_init(dev);
	intel_update_fdi_pll_freq(dev_priv);

	intel_update_czclk(dev_priv);
	intel_modeset_init_hw(dev);

	intel_hdcp_component_init(dev_priv);

	if (dev_priv->max_cdclk_freq == 0)
		intel_update_max_cdclk(dev_priv);

	/* Just disable the legacy VGA plane once at startup. */
	i915_disable_vga(dev_priv);
	intel_setup_outputs(dev_priv);

	/* Take over the state left behind by the BIOS. */
	drm_modeset_lock_all(dev);
	intel_modeset_setup_hw_state(dev, dev->mode_config.acquire_ctx);
	drm_modeset_unlock_all(dev);

	for_each_intel_crtc(dev, crtc) {
		struct intel_initial_plane_config plane_config = {};

		if (!crtc->active)
			continue;

		/*
		 * Read out the plane configuration the BIOS left behind
		 * on this active pipe.
		 */
		dev_priv->display.get_initial_plane_config(crtc,
							   &plane_config);

		/*
		 * Try to locate the BIOS framebuffer object so it can be
		 * reused (e.g. by fbdev) for a flicker-free takeover.
		 */
		intel_find_initial_plane_obj(crtc, &plane_config);
	}

	/*
	 * The BIOS' watermarks may be inconsistent with the state just
	 * read out; recompute them (non-GMCH platforms only).
	 */
	if (!HAS_GMCH(dev_priv))
		sanitize_watermarks(dev);

	/*
	 * Push the inherited state back into the hardware; a failure
	 * here is not fatal -- the first real modeset will fix it up.
	 */
	ret = intel_initial_commit(dev);
	if (ret)
		DRM_DEBUG_KMS("Initial commit in probe failed.\n");

	return 0;
}
16284
/*
 * Force-enable a pipe on i830 (quirk: some i830 variants need both pipes
 * running) by programming a fixed 640x480@60 mode.
 */
void i830_enable_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);

	/* Fixed DPLL dividers for 640x480@60 from a 48 MHz refclk. */
	struct dpll clock = {
		.m1 = 18,
		.m2 = 7,
		.p1 = 13,
		.p2 = 4,
		.n = 2,
	};
	u32 dpll, fp;
	int i;

	/* Sanity check: these dividers must yield a 25.154 MHz dot clock. */
	WARN_ON(i9xx_calc_dpll_params(48000, &clock) != 25154);

	DRM_DEBUG_KMS("enabling pipe %c due to force quirk (vco=%d dot=%d)\n",
		      pipe_name(pipe), clock.vco, clock.dot);

	fp = i9xx_dpll_compute_fp(&clock);
	dpll = DPLL_DVO_2X_MODE |
		DPLL_VGA_MODE_DIS |
		((clock.p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT) |
		PLL_P2_DIVIDE_BY_4 |
		PLL_REF_INPUT_DREFCLK |
		DPLL_VCO_ENABLE;

	I915_WRITE(FP0(pipe), fp);
	I915_WRITE(FP1(pipe), fp);

	/* 640x480@60 pipe timings. */
	I915_WRITE(HTOTAL(pipe), (640 - 1) | ((800 - 1) << 16));
	I915_WRITE(HBLANK(pipe), (640 - 1) | ((800 - 1) << 16));
	I915_WRITE(HSYNC(pipe), (656 - 1) | ((752 - 1) << 16));
	I915_WRITE(VTOTAL(pipe), (480 - 1) | ((525 - 1) << 16));
	I915_WRITE(VBLANK(pipe), (480 - 1) | ((525 - 1) << 16));
	I915_WRITE(VSYNC(pipe), (490 - 1) | ((492 - 1) << 16));
	I915_WRITE(PIPESRC(pipe), ((640 - 1) << 16) | (480 - 1));

	/*
	 * First write the DPLL with VGA mode still enabled, then the
	 * final value. NOTE(review): this two-step write looks like a
	 * hardware enabling requirement -- confirm against the docs.
	 */
	I915_WRITE(DPLL(pipe), dpll & ~DPLL_VGA_MODE_DIS);
	I915_WRITE(DPLL(pipe), dpll);

	/* Wait for the clocks to stabilize. */
	POSTING_READ(DPLL(pipe));
	udelay(150);

	/* Re-write the final DPLL value once the PLL has settled. */
	I915_WRITE(DPLL(pipe), dpll);

	/* The DPLL value is written three more times, 150us apart. */
	for (i = 0; i < 3 ; i++) {
		I915_WRITE(DPLL(pipe), dpll);
		POSTING_READ(DPLL(pipe));
		udelay(150);
	}

	I915_WRITE(PIPECONF(pipe), PIPECONF_ENABLE | PIPECONF_PROGRESSIVE);
	POSTING_READ(PIPECONF(pipe));

	/* The pipe is up once the scanline counter starts moving. */
	intel_wait_for_pipe_scanline_moving(crtc);
}
16354
/* Force-disable a pipe previously enabled by the i830 force quirk. */
void i830_disable_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
{
	struct intel_crtc *crtc = intel_get_crtc_for_pipe(dev_priv, pipe);

	DRM_DEBUG_KMS("disabling pipe %c due to force quirk\n",
		      pipe_name(pipe));

	/* All planes and cursors should already be off at this point. */
	WARN_ON(I915_READ(DSPCNTR(PLANE_A)) & DISPLAY_PLANE_ENABLE);
	WARN_ON(I915_READ(DSPCNTR(PLANE_B)) & DISPLAY_PLANE_ENABLE);
	WARN_ON(I915_READ(DSPCNTR(PLANE_C)) & DISPLAY_PLANE_ENABLE);
	WARN_ON(I915_READ(CURCNTR(PIPE_A)) & MCURSOR_MODE);
	WARN_ON(I915_READ(CURCNTR(PIPE_B)) & MCURSOR_MODE);

	I915_WRITE(PIPECONF(pipe), 0);
	POSTING_READ(PIPECONF(pipe));

	/* Wait for the pipe to actually stop before touching the DPLL. */
	intel_wait_for_pipe_scanline_stopped(crtc);

	I915_WRITE(DPLL(pipe), DPLL_VGA_MODE_DIS);
	POSTING_READ(DPLL(pipe));
}
16376
16377 static void
16378 intel_sanitize_plane_mapping(struct drm_i915_private *dev_priv)
16379 {
16380 struct intel_crtc *crtc;
16381
16382 if (INTEL_GEN(dev_priv) >= 4)
16383 return;
16384
16385 for_each_intel_crtc(&dev_priv->drm, crtc) {
16386 struct intel_plane *plane =
16387 to_intel_plane(crtc->base.primary);
16388 struct intel_crtc *plane_crtc;
16389 enum pipe pipe;
16390
16391 if (!plane->get_hw_state(plane, &pipe))
16392 continue;
16393
16394 if (pipe == crtc->pipe)
16395 continue;
16396
16397 DRM_DEBUG_KMS("[PLANE:%d:%s] attached to the wrong pipe, disabling plane\n",
16398 plane->base.base.id, plane->base.name);
16399
16400 plane_crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
16401 intel_plane_disable_noatomic(plane_crtc, plane);
16402 }
16403 }
16404
16405 static bool intel_crtc_has_encoders(struct intel_crtc *crtc)
16406 {
16407 struct drm_device *dev = crtc->base.dev;
16408 struct intel_encoder *encoder;
16409
16410 for_each_encoder_on_crtc(dev, &crtc->base, encoder)
16411 return true;
16412
16413 return false;
16414 }
16415
16416 static struct intel_connector *intel_encoder_find_connector(struct intel_encoder *encoder)
16417 {
16418 struct drm_device *dev = encoder->base.dev;
16419 struct intel_connector *connector;
16420
16421 for_each_connector_on_encoder(dev, &encoder->base, connector)
16422 return connector;
16423
16424 return NULL;
16425 }
16426
16427 static bool has_pch_trancoder(struct drm_i915_private *dev_priv,
16428 enum pipe pch_transcoder)
16429 {
16430 return HAS_PCH_IBX(dev_priv) || HAS_PCH_CPT(dev_priv) ||
16431 (HAS_PCH_LPT_H(dev_priv) && pch_transcoder == PIPE_A);
16432 }
16433
/*
 * Bring a crtc's inherited hardware state into a form the driver can
 * work with: clear stray frame start delays, turn off planes that should
 * not be on, and disable pipes that have no encoders attached.
 */
static void intel_sanitize_crtc(struct intel_crtc *crtc,
				struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_device *dev = crtc->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(crtc->base.state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/* Clear any frame start delay the BIOS may have programmed. */
	if (crtc->active && !transcoder_is_dsi(cpu_transcoder)) {
		i915_reg_t reg = PIPECONF(cpu_transcoder);

		I915_WRITE(reg,
			   I915_READ(reg) & ~PIPECONF_FRAME_START_DELAY_MASK);
	}

	if (crtc_state->base.active) {
		struct intel_plane *plane;

		/* Disable every visible plane except the primary. */
		for_each_intel_plane_on_crtc(dev, crtc, plane) {
			const struct intel_plane_state *plane_state =
				to_intel_plane_state(plane->base.state);

			if (plane_state->base.visible &&
			    plane->base.type != DRM_PLANE_TYPE_PRIMARY)
				intel_plane_disable_noatomic(crtc, plane);
		}

		/*
		 * Put the pipe bottom color into a known state so later
		 * state readout/checking sees consistent values.
		 */
		if (INTEL_GEN(dev_priv) >= 9)
			I915_WRITE(SKL_BOTTOM_COLOR(crtc->pipe),
				   SKL_BOTTOM_COLOR_GAMMA_ENABLE |
				   SKL_BOTTOM_COLOR_CSC_ENABLE);
	}

	/* An active pipe with no encoders attached is useless; kill it. */
	if (crtc_state->base.active && !intel_crtc_has_encoders(crtc))
		intel_crtc_disable_noatomic(&crtc->base, ctx);

	if (crtc_state->base.active || HAS_GMCH(dev_priv)) {
		/*
		 * Start with FIFO underrun reporting disabled so the
		 * inherited BIOS state can't flood us with spurious
		 * underrun errors; it is re-enabled later.
		 * NOTE(review): on GMCH platforms this is done even for
		 * inactive pipes -- presumably the reporting there is
		 * not cleanly per-pipe; confirm.
		 */
		crtc->cpu_fifo_underrun_disabled = true;

		/*
		 * Same for the PCH transcoder underruns, where the
		 * platform has a PCH transcoder for this pipe.
		 */
		if (has_pch_trancoder(dev_priv, crtc->pipe))
			crtc->pch_fifo_underrun_disabled = true;
	}
}
16506
16507 static bool has_bogus_dpll_config(const struct intel_crtc_state *crtc_state)
16508 {
16509 struct drm_i915_private *dev_priv = to_i915(crtc_state->base.crtc->dev);
16510
16511
16512
16513
16514
16515
16516
16517
16518
16519
16520
16521 return IS_GEN(dev_priv, 6) &&
16522 crtc_state->base.active &&
16523 crtc_state->shared_dpll &&
16524 crtc_state->port_clock == 0;
16525 }
16526
/*
 * Fix up an encoder whose inherited hardware state is inconsistent:
 * an encoder with active connectors but no active pipe is manually
 * disabled, and stale crtc/connector links are severed.
 */
static void intel_sanitize_encoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_connector *connector;
	struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
	struct intel_crtc_state *crtc_state = crtc ?
		to_intel_crtc_state(crtc->base.state) : NULL;

	/*
	 * The encoder is "active" only if it is linked to a crtc AND
	 * that crtc's pipe is actually running.
	 */
	bool has_active_crtc = crtc_state &&
		crtc_state->base.active;

	/* Treat SNB pipes with a bogus BIOS DPLL config as inactive. */
	if (crtc_state && has_bogus_dpll_config(crtc_state)) {
		DRM_DEBUG_KMS("BIOS has misprogrammed the hardware. Disabling pipe %c\n",
			      pipe_name(crtc->pipe));
		has_active_crtc = false;
	}

	connector = intel_encoder_find_connector(encoder);
	if (connector && !has_active_crtc) {
		DRM_DEBUG_KMS("[ENCODER:%d:%s] has active connectors but no active pipe!\n",
			      encoder->base.base.id,
			      encoder->base.name);

		/*
		 * If the encoder still thinks it is attached to a pipe,
		 * run its disable hooks by hand.
		 */
		if (crtc_state) {
			struct drm_encoder *best_encoder;

			DRM_DEBUG_KMS("[ENCODER:%d:%s] manually disabled\n",
				      encoder->base.base.id,
				      encoder->base.name);

			/*
			 * Temporarily point the connector at this encoder
			 * so the disable hooks see a consistent state,
			 * and restore the original link afterwards.
			 */
			best_encoder = connector->base.state->best_encoder;
			connector->base.state->best_encoder = &encoder->base;

			if (encoder->disable)
				encoder->disable(encoder, crtc_state,
						 connector->base.state);
			if (encoder->post_disable)
				encoder->post_disable(encoder, crtc_state,
						      connector->base.state);

			connector->base.state->best_encoder = best_encoder;
		}
		encoder->base.crtc = NULL;

		/*
		 * The encoder is now disabled; mark the attached
		 * connector off as well so the software state agrees.
		 */
		connector->base.dpms = DRM_MODE_DPMS_OFF;
		connector->base.encoder = NULL;
	}

	/* Notify the ACPI opregion of the encoder's final on/off state. */
	intel_opregion_notify_encoder(encoder, connector && has_active_crtc);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_sanitize_encoder_pll_mapping(encoder);
}
16593
16594 void i915_redisable_vga_power_on(struct drm_i915_private *dev_priv)
16595 {
16596 i915_reg_t vga_reg = i915_vgacntrl_reg(dev_priv);
16597
16598 if (!(I915_READ(vga_reg) & VGA_DISP_DISABLE)) {
16599 DRM_DEBUG_KMS("Something enabled VGA plane, disabling it\n");
16600 i915_disable_vga(dev_priv);
16601 }
16602 }
16603
/* Re-disable VGA, but only if its power domain is already powered up. */
void i915_redisable_vga(struct drm_i915_private *dev_priv)
{
	intel_wakeref_t wakeref;

	/*
	 * Only grab the VGA power domain if it is already enabled: if the
	 * domain is off, nothing can have re-enabled the VGA plane, and
	 * we must not power up the hardware just to check a register.
	 */
	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     POWER_DOMAIN_VGA);
	if (!wakeref)
		return;

	i915_redisable_vga_power_on(dev_priv);

	intel_display_power_put(dev_priv, POWER_DOMAIN_VGA, wakeref);
}
16626
16627
/*
 * Read the hardware plane state into the software plane/crtc states,
 * then fix up each crtc's active-planes bookkeeping.
 */
static void readout_plane_state(struct drm_i915_private *dev_priv)
{
	struct intel_plane *plane;
	struct intel_crtc *crtc;

	for_each_intel_plane(&dev_priv->drm, plane) {
		struct intel_plane_state *plane_state =
			to_intel_plane_state(plane->base.state);
		struct intel_crtc_state *crtc_state;
		enum pipe pipe = PIPE_A;
		bool visible;

		/* @pipe reports which pipe the plane currently feeds. */
		visible = plane->get_hw_state(plane, &pipe);

		crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
		crtc_state = to_intel_crtc_state(crtc->base.state);

		intel_set_plane_visible(crtc_state, plane_state, visible);

		DRM_DEBUG_KMS("[PLANE:%d:%s] hw state readout: %s, pipe %c\n",
			      plane->base.base.id, plane->base.name,
			      enableddisabled(visible), pipe_name(pipe));
	}

	/* Reconcile each crtc's plane bookkeeping with the readout. */
	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		fixup_active_planes(crtc_state);
	}
}
16659
/*
 * Read the current display hardware state (e.g. as programmed by BIOS/GOP)
 * back into the driver's CRTC/encoder/connector/PLL software state, so the
 * atomic state matches what the hardware is actually doing before the first
 * modeset.  Readout order matters: CRTCs first, then planes, PLLs, encoders,
 * connectors, and finally the derived per-CRTC state.
 */
static void intel_modeset_readout_hw_state(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum pipe pipe;
	struct intel_crtc *crtc;
	struct intel_encoder *encoder;
	struct intel_connector *connector;
	struct drm_connector_list_iter conn_iter;
	int i;

	dev_priv->active_crtcs = 0;

	for_each_intel_crtc(dev, crtc) {
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);

		/* Throw away any stale software state and start from scratch. */
		__drm_atomic_helper_crtc_destroy_state(&crtc_state->base);
		memset(crtc_state, 0, sizeof(*crtc_state));
		__drm_atomic_helper_crtc_reset(&crtc->base, &crtc_state->base);

		/* active and enable are kept in lockstep during readout */
		crtc_state->base.active = crtc_state->base.enable =
			dev_priv->display.get_pipe_config(crtc, crtc_state);

		crtc->base.enabled = crtc_state->base.enable;
		crtc->active = crtc_state->base.active;

		if (crtc_state->base.active)
			dev_priv->active_crtcs |= 1 << crtc->pipe;

		DRM_DEBUG_KMS("[CRTC:%d:%s] hw state readout: %s\n",
			      crtc->base.base.id, crtc->base.name,
			      enableddisabled(crtc_state->base.active));
	}

	readout_plane_state(dev_priv);

	for (i = 0; i < dev_priv->num_shared_dpll; i++) {
		struct intel_shared_dpll *pll = &dev_priv->shared_dplls[i];

		pll->on = pll->info->funcs->get_hw_state(dev_priv, pll,
							 &pll->state.hw_state);

		/*
		 * NOTE(review): EHL DPLL4 apparently needs an extra power
		 * reference while enabled — grab it here so the later
		 * sanitize/disable path can drop it symmetrically.
		 */
		if (IS_ELKHARTLAKE(dev_priv) && pll->on &&
		    pll->info->id == DPLL_ID_EHL_DPLL4) {
			pll->wakeref = intel_display_power_get(dev_priv,
							       POWER_DOMAIN_DPLL_DC_OFF);
		}

		/* Rebuild the crtc_mask from the CRTC states read out above. */
		pll->state.crtc_mask = 0;
		for_each_intel_crtc(dev, crtc) {
			struct intel_crtc_state *crtc_state =
				to_intel_crtc_state(crtc->base.state);

			if (crtc_state->base.active &&
			    crtc_state->shared_dpll == pll)
				pll->state.crtc_mask |= 1 << crtc->pipe;
		}
		pll->active_mask = pll->state.crtc_mask;

		DRM_DEBUG_KMS("%s hw state readout: crtc_mask 0x%08x, on %i\n",
			      pll->info->name, pll->state.crtc_mask, pll->on);
	}

	for_each_intel_encoder(dev, encoder) {
		pipe = 0;

		if (encoder->get_hw_state(encoder, &pipe)) {
			struct intel_crtc_state *crtc_state;

			crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
			crtc_state = to_intel_crtc_state(crtc->base.state);

			encoder->base.crtc = &crtc->base;
			encoder->get_config(encoder, crtc_state);
		} else {
			encoder->base.crtc = NULL;
		}

		DRM_DEBUG_KMS("[ENCODER:%d:%s] hw state readout: %s, pipe %c\n",
			      encoder->base.base.id, encoder->base.name,
			      enableddisabled(encoder->base.crtc),
			      pipe_name(pipe));
	}

	drm_connector_list_iter_begin(dev, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		if (connector->get_hw_state(connector)) {
			connector->base.dpms = DRM_MODE_DPMS_ON;

			encoder = connector->encoder;
			connector->base.encoder = &encoder->base;

			if (encoder->base.crtc &&
			    encoder->base.crtc->state->active) {
				/*
				 * Keep the CRTC state's connector/encoder
				 * masks accurate during readout, since later
				 * sanitize/disable code may rely on them.
				 */
				encoder->base.crtc->state->connector_mask |=
					drm_connector_mask(&connector->base);
				encoder->base.crtc->state->encoder_mask |=
					drm_encoder_mask(&encoder->base);
			}

		} else {
			connector->base.dpms = DRM_MODE_DPMS_OFF;
			connector->base.encoder = NULL;
		}
		DRM_DEBUG_KMS("[CONNECTOR:%d:%s] hw state readout: %s\n",
			      connector->base.base.id, connector->base.name,
			      enableddisabled(connector->base.encoder));
	}
	drm_connector_list_iter_end(&conn_iter);

	/* Derive per-CRTC state (mode, pixel rate, cdclk, bw) from the readout. */
	for_each_intel_crtc(dev, crtc) {
		struct intel_bw_state *bw_state =
			to_intel_bw_state(dev_priv->bw_obj.state);
		struct intel_crtc_state *crtc_state =
			to_intel_crtc_state(crtc->base.state);
		struct intel_plane *plane;
		int min_cdclk = 0;

		memset(&crtc->base.mode, 0, sizeof(crtc->base.mode));
		if (crtc_state->base.active) {
			intel_mode_from_pipe_config(&crtc->base.mode, crtc_state);
			crtc->base.mode.hdisplay = crtc_state->pipe_src_w;
			crtc->base.mode.vdisplay = crtc_state->pipe_src_h;
			intel_mode_from_pipe_config(&crtc_state->base.adjusted_mode, crtc_state);
			WARN_ON(drm_atomic_set_mode_for_crtc(crtc->base.state, &crtc->base.mode));

			/*
			 * An initial mode must be set to keep the atomic core
			 * happy for an enabled CRTC.  The derived state is not
			 * fully computed here, so flag the state as inherited
			 * so the first commit does a full recalculation.
			 */
			crtc_state->base.mode.private_flags = I915_MODE_FLAG_INHERITED;

			intel_crtc_compute_pixel_rate(crtc_state);

			if (dev_priv->display.modeset_calc_cdclk) {
				min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
				if (WARN_ON(min_cdclk < 0))
					min_cdclk = 0;
			}

			drm_calc_timestamping_constants(&crtc->base,
							&crtc_state->base.adjusted_mode);
			update_scanline_offset(crtc_state);
		}

		dev_priv->min_cdclk[crtc->pipe] = min_cdclk;
		dev_priv->min_voltage_level[crtc->pipe] =
			crtc_state->min_voltage_level;

		for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane) {
			const struct intel_plane_state *plane_state =
				to_intel_plane_state(plane->base.state);

			/*
			 * Rough worst-case estimate (4 bytes/pixel at the
			 * pipe's pixel rate) — the real per-plane data rate
			 * is recomputed on the next full commit.
			 */
			if (plane_state->base.visible)
				crtc_state->data_rate[plane->id] =
					4 * crtc_state->pixel_rate;
		}

		intel_bw_crtc_update(bw_state, crtc_state);

		intel_pipe_config_sanity_check(dev_priv, crtc_state);
	}
}
16837
16838 static void
16839 get_encoder_power_domains(struct drm_i915_private *dev_priv)
16840 {
16841 struct intel_encoder *encoder;
16842
16843 for_each_intel_encoder(&dev_priv->drm, encoder) {
16844 struct intel_crtc_state *crtc_state;
16845
16846 if (!encoder->get_power_domains)
16847 continue;
16848
16849
16850
16851
16852
16853 if (!encoder->base.crtc)
16854 continue;
16855
16856 crtc_state = to_intel_crtc_state(encoder->base.crtc->state);
16857 encoder->get_power_domains(encoder, crtc_state);
16858 }
16859 }
16860
16861 static void intel_early_display_was(struct drm_i915_private *dev_priv)
16862 {
16863
16864
16865
16866
16867 if (IS_GEN_RANGE(dev_priv, 10, 12) || IS_GEMINILAKE(dev_priv))
16868 I915_WRITE(GEN9_CLKGATE_DIS_0, I915_READ(GEN9_CLKGATE_DIS_0) |
16869 DARBF_GATING_DIS);
16870
16871 if (IS_HASWELL(dev_priv)) {
16872
16873
16874
16875
16876 I915_WRITE(CHICKEN_PAR1_1,
16877 I915_READ(CHICKEN_PAR1_1) | FORCE_ARB_IDLE_PLANES);
16878 }
16879 }
16880
16881 static void ibx_sanitize_pch_hdmi_port(struct drm_i915_private *dev_priv,
16882 enum port port, i915_reg_t hdmi_reg)
16883 {
16884 u32 val = I915_READ(hdmi_reg);
16885
16886 if (val & SDVO_ENABLE ||
16887 (val & SDVO_PIPE_SEL_MASK) == SDVO_PIPE_SEL(PIPE_A))
16888 return;
16889
16890 DRM_DEBUG_KMS("Sanitizing transcoder select for HDMI %c\n",
16891 port_name(port));
16892
16893 val &= ~SDVO_PIPE_SEL_MASK;
16894 val |= SDVO_PIPE_SEL(PIPE_A);
16895
16896 I915_WRITE(hdmi_reg, val);
16897 }
16898
16899 static void ibx_sanitize_pch_dp_port(struct drm_i915_private *dev_priv,
16900 enum port port, i915_reg_t dp_reg)
16901 {
16902 u32 val = I915_READ(dp_reg);
16903
16904 if (val & DP_PORT_EN ||
16905 (val & DP_PIPE_SEL_MASK) == DP_PIPE_SEL(PIPE_A))
16906 return;
16907
16908 DRM_DEBUG_KMS("Sanitizing transcoder select for DP %c\n",
16909 port_name(port));
16910
16911 val &= ~DP_PIPE_SEL_MASK;
16912 val |= DP_PIPE_SEL(PIPE_A);
16913
16914 I915_WRITE(dp_reg, val);
16915 }
16916
16917 static void ibx_sanitize_pch_ports(struct drm_i915_private *dev_priv)
16918 {
16919
16920
16921
16922
16923
16924
16925
16926
16927
16928
16929
16930 ibx_sanitize_pch_dp_port(dev_priv, PORT_B, PCH_DP_B);
16931 ibx_sanitize_pch_dp_port(dev_priv, PORT_C, PCH_DP_C);
16932 ibx_sanitize_pch_dp_port(dev_priv, PORT_D, PCH_DP_D);
16933
16934
16935 ibx_sanitize_pch_hdmi_port(dev_priv, PORT_B, PCH_HDMIB);
16936 ibx_sanitize_pch_hdmi_port(dev_priv, PORT_C, PCH_HDMIC);
16937 ibx_sanitize_pch_hdmi_port(dev_priv, PORT_D, PCH_HDMID);
16938 }
16939
16940
16941
16942
/*
 * Scan out the current hardware modeset state, sanitize it, and make the
 * driver's software state consistent with it.  The ordering of the steps
 * below is deliberate (readout before sanitize, vblank restore before
 * plane sanitize, etc.) — do not reorder casually.
 */
static void
intel_modeset_setup_hw_state(struct drm_device *dev,
			     struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_crtc_state *crtc_state;
	struct intel_encoder *encoder;
	struct intel_crtc *crtc;
	intel_wakeref_t wakeref;
	int i;

	/* Keep everything powered while we poke at the hardware. */
	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_INIT);

	intel_early_display_was(dev_priv);
	intel_modeset_readout_hw_state(dev);

	/*
	 * Sanitize Type-C port state up front; MST encoders are skipped
	 * (presumably their primary encoder covers the port — confirm).
	 */
	for_each_intel_encoder(dev, encoder) {
		enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

		if (encoder->type != INTEL_OUTPUT_DP_MST &&
		    intel_phy_is_tc(dev_priv, phy))
			intel_tc_port_sanitize(enc_to_dig_port(&encoder->base));
	}

	get_encoder_power_domains(dev_priv);

	if (HAS_PCH_IBX(dev_priv))
		ibx_sanitize_pch_ports(dev_priv);

	/*
	 * Restore vblank state before plane/encoder/CRTC sanitization,
	 * since the sanitize code below may need working vblank waits.
	 */
	for_each_intel_crtc(&dev_priv->drm, crtc) {
		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_crtc_vblank_reset(&crtc->base);

		if (crtc_state->base.active)
			intel_crtc_vblank_on(crtc_state);
	}

	intel_sanitize_plane_mapping(dev_priv);

	for_each_intel_encoder(dev, encoder)
		intel_sanitize_encoder(encoder);

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		crtc_state = to_intel_crtc_state(crtc->base.state);
		intel_sanitize_crtc(crtc, ctx);
		intel_dump_pipe_config(crtc_state, NULL, "[setup_hw_state]");
	}

	intel_modeset_update_connector_atomic_state(dev);

	/* Switch off any shared DPLL left enabled but unused. */
	for (i = 0; i < dev_priv->num_shared_dpll; i++) {
		struct intel_shared_dpll *pll = &dev_priv->shared_dplls[i];

		if (!pll->on || pll->active_mask)
			continue;

		DRM_DEBUG_KMS("%s enabled but not in use, disabling\n",
			      pll->info->name);

		pll->info->funcs->disable(dev_priv, pll);
		pll->on = false;
	}

	/* Watermark readout/sanitization, per-platform. */
	if (IS_G4X(dev_priv)) {
		g4x_wm_get_hw_state(dev_priv);
		g4x_wm_sanitize(dev_priv);
	} else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		vlv_wm_get_hw_state(dev_priv);
		vlv_wm_sanitize(dev_priv);
	} else if (INTEL_GEN(dev_priv) >= 9) {
		skl_wm_get_hw_state(dev_priv);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		ilk_wm_get_hw_state(dev_priv);
	}

	/*
	 * Grab the power domains each active CRTC needs; any domains
	 * returned for release here would indicate a bookkeeping bug,
	 * hence the WARN_ON before dropping them again.
	 */
	for_each_intel_crtc(dev, crtc) {
		u64 put_domains;

		crtc_state = to_intel_crtc_state(crtc->base.state);
		put_domains = modeset_get_crtc_power_domains(crtc_state);
		if (WARN_ON(put_domains))
			modeset_put_power_domains(dev_priv, put_domains);
	}

	intel_display_power_put(dev_priv, POWER_DOMAIN_INIT, wakeref);

	intel_fbc_init_pipe_state(dev_priv);
}
17040
17041 void intel_display_resume(struct drm_device *dev)
17042 {
17043 struct drm_i915_private *dev_priv = to_i915(dev);
17044 struct drm_atomic_state *state = dev_priv->modeset_restore_state;
17045 struct drm_modeset_acquire_ctx ctx;
17046 int ret;
17047
17048 dev_priv->modeset_restore_state = NULL;
17049 if (state)
17050 state->acquire_ctx = &ctx;
17051
17052 drm_modeset_acquire_init(&ctx, 0);
17053
17054 while (1) {
17055 ret = drm_modeset_lock_all_ctx(dev, &ctx);
17056 if (ret != -EDEADLK)
17057 break;
17058
17059 drm_modeset_backoff(&ctx);
17060 }
17061
17062 if (!ret)
17063 ret = __intel_display_resume(dev, state, &ctx);
17064
17065 intel_enable_ipc(dev_priv);
17066 drm_modeset_drop_locks(&ctx);
17067 drm_modeset_acquire_fini(&ctx);
17068
17069 if (ret)
17070 DRM_ERROR("Restoring old state failed with %i\n", ret);
17071 if (state)
17072 drm_atomic_state_put(state);
17073 }
17074
17075 static void intel_hpd_poll_fini(struct drm_device *dev)
17076 {
17077 struct intel_connector *connector;
17078 struct drm_connector_list_iter conn_iter;
17079
17080
17081 drm_connector_list_iter_begin(dev, &conn_iter);
17082 for_each_intel_connector_iter(connector, &conn_iter) {
17083 if (connector->modeset_retry_work.func)
17084 cancel_work_sync(&connector->modeset_retry_work);
17085 if (connector->hdcp.shim) {
17086 cancel_delayed_work_sync(&connector->hdcp.check_work);
17087 cancel_work_sync(&connector->hdcp.prop_work);
17088 }
17089 }
17090 drm_connector_list_iter_end(&conn_iter);
17091 }
17092
/*
 * Tear down all display/modeset state on driver unload.  The teardown
 * order matters: work is flushed first, then interrupts and hotplug
 * polling are stopped before anything that they could call into is freed.
 */
void intel_modeset_driver_remove(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);

	flush_workqueue(dev_priv->modeset_wq);

	/* The free worker must be idle and its list drained before teardown. */
	flush_work(&dev_priv->atomic_helper.free_work);
	WARN_ON(!llist_empty(&dev_priv->atomic_helper.free_list));

	/*
	 * Stop interrupts early to avoid races with the teardown below
	 * (connector shutdown etc. could otherwise trigger new events).
	 */
	intel_irq_uninstall(dev_priv);

	/*
	 * Shut down hotplug polling only after irq handling is gone,
	 * since hpd handling can re-arm the poll handlers.
	 */
	intel_hpd_poll_fini(dev);

	/* Poll work can call into fbdev, hence clean that up afterwards. */
	intel_fbdev_fini(dev_priv);

	intel_unregister_dsm_handler();

	intel_fbc_global_disable(dev_priv);

	/* Flush any outstanding tasks before mode config teardown. */
	flush_scheduled_work();

	intel_hdcp_component_fini(dev_priv);

	drm_mode_config_cleanup(dev);

	intel_overlay_cleanup(dev_priv);

	intel_gmbus_teardown(dev_priv);

	/* modeset_wq was flushed above; now it can be destroyed. */
	destroy_workqueue(dev_priv->modeset_wq);

	intel_fbc_cleanup_cfb(dev_priv);
}
17137
17138
17139
17140
17141 int intel_modeset_vga_set_state(struct drm_i915_private *dev_priv, bool state)
17142 {
17143 unsigned reg = INTEL_GEN(dev_priv) >= 6 ? SNB_GMCH_CTRL : INTEL_GMCH_CTRL;
17144 u16 gmch_ctrl;
17145
17146 if (pci_read_config_word(dev_priv->bridge_dev, reg, &gmch_ctrl)) {
17147 DRM_ERROR("failed to read control word\n");
17148 return -EIO;
17149 }
17150
17151 if (!!(gmch_ctrl & INTEL_GMCH_VGA_DISABLE) == !state)
17152 return 0;
17153
17154 if (state)
17155 gmch_ctrl &= ~INTEL_GMCH_VGA_DISABLE;
17156 else
17157 gmch_ctrl |= INTEL_GMCH_VGA_DISABLE;
17158
17159 if (pci_write_config_word(dev_priv->bridge_dev, reg, gmch_ctrl)) {
17160 DRM_ERROR("failed to write control word\n");
17161 return -EIO;
17162 }
17163
17164 return 0;
17165 }
17166
17167 #if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
17168
/* Snapshot of display register state captured at GPU error time. */
struct intel_display_error_state {

	u32 power_well_driver;	/* HSW/BDW power well control (driver) */

	/* Per-pipe cursor plane registers. */
	struct intel_cursor_error_state {
		u32 control;
		u32 position;
		u32 base;
		u32 size;	/* NOTE(review): never filled in by capture */
	} cursor[I915_MAX_PIPES];

	/* Per-pipe state; registers valid only if power_domain_on. */
	struct intel_pipe_error_state {
		bool power_domain_on;
		u32 source;
		u32 stat;	/* GMCH platforms only */
	} pipe[I915_MAX_PIPES];

	/* Per-pipe primary plane registers (subset varies by gen). */
	struct intel_plane_error_state {
		u32 control;
		u32 stride;
		u32 size;	/* gen <= 3 only */
		u32 pos;	/* gen <= 3 only */
		u32 addr;	/* gen <= 7, not HSW */
		u32 surface;	/* gen >= 4 only */
		u32 tile_offset;	/* gen >= 4 only */
	} plane[I915_MAX_PIPES];

	/* Transcoder timing registers; valid only if available && powered. */
	struct intel_transcoder_error_state {
		bool available;
		bool power_domain_on;
		enum transcoder cpu_transcoder;

		u32 conf;

		u32 htotal;
		u32 hblank;
		u32 hsync;
		u32 vtotal;
		u32 vblank;
		u32 vsync;
	} transcoder[5];
};
17211
17212 struct intel_display_error_state *
17213 intel_display_capture_error_state(struct drm_i915_private *dev_priv)
17214 {
17215 struct intel_display_error_state *error;
17216 int transcoders[] = {
17217 TRANSCODER_A,
17218 TRANSCODER_B,
17219 TRANSCODER_C,
17220 TRANSCODER_D,
17221 TRANSCODER_EDP,
17222 };
17223 int i;
17224
17225 BUILD_BUG_ON(ARRAY_SIZE(transcoders) != ARRAY_SIZE(error->transcoder));
17226
17227 if (!HAS_DISPLAY(dev_priv))
17228 return NULL;
17229
17230 error = kzalloc(sizeof(*error), GFP_ATOMIC);
17231 if (error == NULL)
17232 return NULL;
17233
17234 if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
17235 error->power_well_driver = I915_READ(HSW_PWR_WELL_CTL2);
17236
17237 for_each_pipe(dev_priv, i) {
17238 error->pipe[i].power_domain_on =
17239 __intel_display_power_is_enabled(dev_priv,
17240 POWER_DOMAIN_PIPE(i));
17241 if (!error->pipe[i].power_domain_on)
17242 continue;
17243
17244 error->cursor[i].control = I915_READ(CURCNTR(i));
17245 error->cursor[i].position = I915_READ(CURPOS(i));
17246 error->cursor[i].base = I915_READ(CURBASE(i));
17247
17248 error->plane[i].control = I915_READ(DSPCNTR(i));
17249 error->plane[i].stride = I915_READ(DSPSTRIDE(i));
17250 if (INTEL_GEN(dev_priv) <= 3) {
17251 error->plane[i].size = I915_READ(DSPSIZE(i));
17252 error->plane[i].pos = I915_READ(DSPPOS(i));
17253 }
17254 if (INTEL_GEN(dev_priv) <= 7 && !IS_HASWELL(dev_priv))
17255 error->plane[i].addr = I915_READ(DSPADDR(i));
17256 if (INTEL_GEN(dev_priv) >= 4) {
17257 error->plane[i].surface = I915_READ(DSPSURF(i));
17258 error->plane[i].tile_offset = I915_READ(DSPTILEOFF(i));
17259 }
17260
17261 error->pipe[i].source = I915_READ(PIPESRC(i));
17262
17263 if (HAS_GMCH(dev_priv))
17264 error->pipe[i].stat = I915_READ(PIPESTAT(i));
17265 }
17266
17267 for (i = 0; i < ARRAY_SIZE(error->transcoder); i++) {
17268 enum transcoder cpu_transcoder = transcoders[i];
17269
17270 if (!INTEL_INFO(dev_priv)->trans_offsets[cpu_transcoder])
17271 continue;
17272
17273 error->transcoder[i].available = true;
17274 error->transcoder[i].power_domain_on =
17275 __intel_display_power_is_enabled(dev_priv,
17276 POWER_DOMAIN_TRANSCODER(cpu_transcoder));
17277 if (!error->transcoder[i].power_domain_on)
17278 continue;
17279
17280 error->transcoder[i].cpu_transcoder = cpu_transcoder;
17281
17282 error->transcoder[i].conf = I915_READ(PIPECONF(cpu_transcoder));
17283 error->transcoder[i].htotal = I915_READ(HTOTAL(cpu_transcoder));
17284 error->transcoder[i].hblank = I915_READ(HBLANK(cpu_transcoder));
17285 error->transcoder[i].hsync = I915_READ(HSYNC(cpu_transcoder));
17286 error->transcoder[i].vtotal = I915_READ(VTOTAL(cpu_transcoder));
17287 error->transcoder[i].vblank = I915_READ(VBLANK(cpu_transcoder));
17288 error->transcoder[i].vsync = I915_READ(VSYNC(cpu_transcoder));
17289 }
17290
17291 return error;
17292 }
17293
/* Shorthand for appending to the error state buffer. */
#define err_printf(e, ...) i915_error_printf(e, __VA_ARGS__)

/*
 * Pretty-print a previously captured display error state into the error
 * state buffer.  Mirrors the per-gen conditionals used at capture time so
 * only registers that were actually read get printed.
 */
void
intel_display_print_error_state(struct drm_i915_error_state_buf *m,
				struct intel_display_error_state *error)
{
	struct drm_i915_private *dev_priv = m->i915;
	int i;

	/* NULL means capture was skipped (no display) or allocation failed. */
	if (!error)
		return;

	err_printf(m, "Num Pipes: %d\n", INTEL_INFO(dev_priv)->num_pipes);
	if (IS_HASWELL(dev_priv) || IS_BROADWELL(dev_priv))
		err_printf(m, "PWR_WELL_CTL2: %08x\n",
			   error->power_well_driver);
	for_each_pipe(dev_priv, i) {
		err_printf(m, "Pipe [%d]:\n", i);
		err_printf(m, "  Power: %s\n",
			   onoff(error->pipe[i].power_domain_on));
		err_printf(m, "  SRC: %08x\n", error->pipe[i].source);
		err_printf(m, "  STAT: %08x\n", error->pipe[i].stat);

		err_printf(m, "Plane [%d]:\n", i);
		err_printf(m, "  CNTR: %08x\n", error->plane[i].control);
		err_printf(m, "  STRIDE: %08x\n", error->plane[i].stride);
		if (INTEL_GEN(dev_priv) <= 3) {
			err_printf(m, "  SIZE: %08x\n", error->plane[i].size);
			err_printf(m, "  POS: %08x\n", error->plane[i].pos);
		}
		if (INTEL_GEN(dev_priv) <= 7 && !IS_HASWELL(dev_priv))
			err_printf(m, "  ADDR: %08x\n", error->plane[i].addr);
		if (INTEL_GEN(dev_priv) >= 4) {
			err_printf(m, "  SURF: %08x\n", error->plane[i].surface);
			err_printf(m, "  TILEOFF: %08x\n", error->plane[i].tile_offset);
		}

		err_printf(m, "Cursor [%d]:\n", i);
		err_printf(m, "  CNTR: %08x\n", error->cursor[i].control);
		err_printf(m, "  POS: %08x\n", error->cursor[i].position);
		err_printf(m, "  BASE: %08x\n", error->cursor[i].base);
	}

	for (i = 0; i < ARRAY_SIZE(error->transcoder); i++) {
		/* Transcoders absent on this platform were never captured. */
		if (!error->transcoder[i].available)
			continue;

		err_printf(m, "CPU transcoder: %s\n",
			   transcoder_name(error->transcoder[i].cpu_transcoder));
		err_printf(m, "  Power: %s\n",
			   onoff(error->transcoder[i].power_domain_on));
		err_printf(m, "  CONF: %08x\n", error->transcoder[i].conf);
		err_printf(m, "  HTOTAL: %08x\n", error->transcoder[i].htotal);
		err_printf(m, "  HBLANK: %08x\n", error->transcoder[i].hblank);
		err_printf(m, "  HSYNC: %08x\n", error->transcoder[i].hsync);
		err_printf(m, "  VTOTAL: %08x\n", error->transcoder[i].vtotal);
		err_printf(m, "  VBLANK: %08x\n", error->transcoder[i].vblank);
		err_printf(m, "  VSYNC: %08x\n", error->transcoder[i].vsync);
	}
}
17354
17355 #endif