/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include <drm/drm_fixed.h>
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"

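/* Program the SetCRTC_OverScan table: derive the top/bottom/left/right
 * overscan borders from the rmx (scaling) mode or from the CRTC border size.
 */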
static void atombios_overscan_setup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
	int a1, a2;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;

	switch (radeon_crtc->rmx_type) {
	case RMX_CENTER:
		args.usOverscanTop = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
		args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
		args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
		args.usOverscanRight = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
		break;
	case RMX_ASPECT:
		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;

		if (a1 > a2) {
			args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
			args.usOverscanRight = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
		} else if (a2 > a1) {
			args.usOverscanTop = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
			args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
		}
		break;
	case RMX_FULL:
	default:
		args.usOverscanRight = radeon_crtc->h_border;
		args.usOverscanLeft = radeon_crtc->h_border;
		args.usOverscanBottom = radeon_crtc->v_border;
		args.usOverscanTop = radeon_crtc->v_border;
		break;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

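/* Set up the CRTC scaler via the EnableScaler table: pick the TV standard
 * for TV/CV outputs, otherwise the expansion/center mode for the rmx type.
 */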
static void atombios_scaler_setup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	ENABLE_SCALER_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);

	/* fixme - fill in enc_priv for atom dac */
	enum radeon_tv_std tv_std = TV_STD_NTSC;
	bool is_tv = false, is_cv = false;
	struct drm_encoder *encoder;

	if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
		return;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* find tv std */
		if (encoder->crtc == crtc) {
			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
			if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
				struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
				tv_std = tv_dac->tv_std;
				is_tv = true;
			}
		}
	}

	memset(&args, 0, sizeof(args));

	args.ucScaler = radeon_crtc->crtc_id;

	if (is_tv) {
		switch (tv_std) {
		case TV_STD_NTSC:
		default:
			args.ucTVStandard = ATOM_TV_NTSC;
			break;
		case TV_STD_PAL:
			args.ucTVStandard = ATOM_TV_PAL;
			break;
		case TV_STD_PAL_M:
			args.ucTVStandard = ATOM_TV_PALM;
			break;
		case TV_STD_PAL_60:
			args.ucTVStandard = ATOM_TV_PAL60;
			break;
		case TV_STD_NTSC_J:
			args.ucTVStandard = ATOM_TV_NTSCJ;
			break;
		case TV_STD_SCART_PAL:
			args.ucTVStandard = ATOM_TV_PAL; /* ??? */
			break;
		case TV_STD_SECAM:
			args.ucTVStandard = ATOM_TV_SECAM;
			break;
		case TV_STD_PAL_CN:
			args.ucTVStandard = ATOM_TV_PALCN;
			break;
		}
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else if (is_cv) {
		args.ucTVStandard = ATOM_TV_CV;
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else {
		switch (radeon_crtc->rmx_type) {
		case RMX_FULL:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		case RMX_CENTER:
			args.ucEnable = ATOM_SCALER_CENTER;
			break;
		case RMX_ASPECT:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		default:
			if (ASIC_IS_AVIVO(rdev))
				args.ucEnable = ATOM_SCALER_DISABLE;
			else
				args.ucEnable = ATOM_SCALER_CENTER;
			break;
		}
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	if ((is_tv || is_cv)
	    && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
		atom_rv515_force_tv_scaler(rdev, radeon_crtc);
	}
}

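/* Lock or unlock the CRTC's double buffered registers
 * (UpdateCRTC_DoubleBufferRegisters table) around mode programming.
 */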
static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index =
	    GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = lock;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

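/* Thin wrappers around the EnableCRTC, EnableCRTCMemReq and BlankCRTC
 * command tables; state is ATOM_ENABLE or ATOM_DISABLE.
 */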
static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
	BLANK_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucBlanking = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

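/* DPMS on: enable the CRTC and its memory requests, unblank and reload the
 * LUT.  Standby/suspend/off: blank and disable the CRTC.  Power management
 * clocks are recomputed around the change.
 */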
void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		radeon_crtc->enabled = true;
		/* adjust pm to dpms changes BEFORE enabling crtcs */
		radeon_pm_compute_clocks(rdev);
		atombios_enable_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
		atombios_blank_crtc(crtc, ATOM_DISABLE);
		drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
		radeon_crtc_load_lut(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
		atombios_blank_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
		atombios_enable_crtc(crtc, ATOM_DISABLE);
		radeon_crtc->enabled = false;
		/* adjust pm to dpms changes AFTER disabling crtcs */
		radeon_pm_compute_clocks(rdev);
		break;
	}
}

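/* Program the CRTC timing from DTD-style parameters (active size, blanking,
 * sync offset/width, borders) via the SetCRTC_UsingDTDTiming table.
 */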
static void
atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
			     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
	args.usH_Blanking_Time =
		cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
	args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
	args.usV_Blanking_Time =
		cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
	args.usH_SyncOffset =
		cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_SyncOffset =
		cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
	args.ucH_Border = radeon_crtc->h_border;
	args.ucV_Border = radeon_crtc->v_border;

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_DOUBLE_CLOCK_MODE;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

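/* Program the CRTC timing from the full h/v totals and sync positions via
 * the SetCRTC_Timing table.
 */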
static void atombios_crtc_set_timing(struct drm_crtc *crtc,
				     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Total = cpu_to_le16(mode->crtc_htotal);
	args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
	args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
	args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
	args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_DOUBLE_CLOCK_MODE;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

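/* Disable spread spectrum on the PLL feeding this CRTC by clearing the SS
 * enable bit in the PLL SS control register (DCE4 and AVIVO only).
 */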
static void atombios_disable_ss(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u32 ss_cntl;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		switch (radeon_crtc->pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	}
}

union atom_enable_ss {
	ENABLE_LVDS_SS_PARAMETERS legacy;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
};

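/* Enable spread spectrum for LVDS using the parameters from the panel's
 * dig info (percentage, type, step, delay, range); no-op for other outputs
 * and, for now, on DCE4.
 */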
static void atombios_enable_ss(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	struct radeon_encoder_atom_dig *dig = NULL;
	int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
	union atom_enable_ss args;
	uint16_t percentage = 0;
	uint8_t type = 0, step = 0, delay = 0, range = 0;

	/* XXX add ss support for DCE4 */
	if (ASIC_IS_DCE4(rdev))
		return;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			/* only enable spread spectrum on LVDS */
			if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
				dig = radeon_encoder->enc_priv;
				if (dig && dig->ss) {
					percentage = dig->ss->percentage;
					type = dig->ss->type;
					step = dig->ss->step;
					delay = dig->ss->delay;
					range = dig->ss->range;
				} else
					return;
			} else
				return;
			break;
		}
	}

	if (!radeon_encoder)
		return;

	memset(&args, 0, sizeof(args));
	if (ASIC_IS_AVIVO(rdev)) {
		args.v1.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
		args.v1.ucSpreadSpectrumType = type;
		args.v1.ucSpreadSpectrumStep = step;
		args.v1.ucSpreadSpectrumDelay = delay;
		args.v1.ucSpreadSpectrumRange = range;
		args.v1.ucPpll = radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
		args.v1.ucEnable = ATOM_ENABLE;
	} else {
		args.legacy.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
		args.legacy.ucSpreadSpectrumType = type;
		args.legacy.ucSpreadSpectrumStepSize_Delay = (step & 3) << 2;
		args.legacy.ucSpreadSpectrumStepSize_Delay |= (delay & 7) << 4;
		args.legacy.ucEnable = ATOM_ENABLE;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

union adjust_pixel_clock {
	ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
	ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
};

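/* Select the PLL algorithm and flags for this mode/encoder combination and,
 * on DCE3+, call AdjustDisplayPll so the BIOS can tweak the pixel clock
 * (and possibly the ref/post dividers) for the transmitter.
 */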
static u32 atombios_adjust_pll(struct drm_crtc *crtc,
			       struct drm_display_mode *mode,
			       struct radeon_pll *pll)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	u32 adjusted_clock = mode->clock;
	int encoder_mode = 0;
	u32 dp_clock = mode->clock;
	int bpc = 8;

	/* reset the pll flags */
	pll->flags = 0;

	/* select the PLL algo */
	if (ASIC_IS_AVIVO(rdev)) {
		if (radeon_new_pll == 0)
			pll->algo = PLL_ALGO_LEGACY;
		else
			pll->algo = PLL_ALGO_NEW;
	} else {
		if (radeon_new_pll == 1)
			pll->algo = PLL_ALGO_NEW;
		else
			pll->algo = PLL_ALGO_LEGACY;
	}

	if (ASIC_IS_AVIVO(rdev)) {
		if ((rdev->family == CHIP_RS600) ||
		    (rdev->family == CHIP_RS690) ||
		    (rdev->family == CHIP_RS740))
			pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
				       RADEON_PLL_PREFER_CLOSEST_LOWER);

		if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
	} else {
		pll->flags |= RADEON_PLL_LEGACY;

		if (mode->clock > 200000) /* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			encoder_mode = atombios_get_encoder_mode(encoder);
			if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) {
				struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
				if (connector) {
					struct radeon_connector *radeon_connector = to_radeon_connector(connector);
					struct radeon_connector_atom_dig *dig_connector =
						radeon_connector->con_priv;

					dp_clock = dig_connector->dp_clock;
				}
			}

			if (ASIC_IS_AVIVO(rdev)) {
				/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
				if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
					adjusted_clock = mode->clock * 2;
				if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) {
					pll->algo = PLL_ALGO_LEGACY;
					pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
				}
				/* There is some evidence (often anecdotal) that RV515 LVDS
				 * (on some boards at least) prefers the legacy algo.  I'm not
				 * sure whether this should be handled generically or on a
				 * case-by-case quirk basis.  Both algos should work fine in the
				 * majority of cases.
				 */
				if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT)) &&
				    (rdev->family == CHIP_RV515)) {
					/* allow the user to override just in case */
					if (radeon_new_pll == 1)
						pll->algo = PLL_ALGO_NEW;
					else
						pll->algo = PLL_ALGO_LEGACY;
				}
			} else {
				if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
					pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
				if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
					pll->flags |= RADEON_PLL_USE_REF_DIV;
			}
			break;
		}
	}

	/* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
	 * accordingly based on the encoder/transmitter to work around
	 * special hw requirements.
	 */
	if (ASIC_IS_DCE3(rdev)) {
		union adjust_pixel_clock args;
		u8 frev, crev;
		int index;

		index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
		if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
					   &crev))
			return adjusted_clock;

		memset(&args, 0, sizeof(args));

		switch (frev) {
		case 1:
			switch (crev) {
			case 1:
			case 2:
				args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v1.ucTransmitterID = radeon_encoder->encoder_id;
				args.v1.ucEncodeMode = encoder_mode;
				if (encoder_mode == ATOM_ENCODER_MODE_DP) {
					/* may want to enable SS on DP eventually */
					/* args.v1.ucConfig |=
					   ADJUST_DISPLAY_CONFIG_SS_ENABLE;*/
				} else if (encoder_mode == ATOM_ENCODER_MODE_LVDS) {
					args.v1.ucConfig |=
						ADJUST_DISPLAY_CONFIG_SS_ENABLE;
				}

				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
				break;
			case 3:
				args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
				args.v3.sInput.ucEncodeMode = encoder_mode;
				args.v3.sInput.ucDispPllConfig = 0;
				if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
					struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;

					if (encoder_mode == ATOM_ENCODER_MODE_DP) {
						/* may want to enable SS on DP/eDP eventually */
						/*args.v3.sInput.ucDispPllConfig |=
						  DISPPLL_CONFIG_SS_ENABLE;*/
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_COHERENT_MODE;
						/* 16200 or 27000 */
						args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
					} else {
						if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
							/* deep color support */
							args.v3.sInput.usPixelClock =
								cpu_to_le16((mode->clock * bpc / 8) / 10);
						}
						if (dig->coherent_mode)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_COHERENT_MODE;
						if (mode->clock > 165000)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_DUAL_LINK;
					}
				} else if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
					if (encoder_mode == ATOM_ENCODER_MODE_DP) {
						/* may want to enable SS on DP/eDP eventually */
						/*args.v3.sInput.ucDispPllConfig |=
						  DISPPLL_CONFIG_SS_ENABLE;*/
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_COHERENT_MODE;
						/* 16200 or 27000 */
						args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
					} else if (encoder_mode == ATOM_ENCODER_MODE_LVDS) {
						/* want to enable SS on LVDS eventually */
						/*args.v3.sInput.ucDispPllConfig |=
						  DISPPLL_CONFIG_SS_ENABLE;*/
					} else {
						if (mode->clock > 165000)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_DUAL_LINK;
					}
				}
				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
				if (args.v3.sOutput.ucRefDiv) {
					pll->flags |= RADEON_PLL_USE_REF_DIV;
					pll->reference_div = args.v3.sOutput.ucRefDiv;
				}
				if (args.v3.sOutput.ucPostDiv) {
					pll->flags |= RADEON_PLL_USE_POST_DIV;
					pll->post_div = args.v3.sOutput.ucPostDiv;
				}
				break;
			default:
				DRM_ERROR("Unknown table version %d %d\n", frev, crev);
				return adjusted_clock;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return adjusted_clock;
		}
	}
	return adjusted_clock;
}

union set_pixel_clock {
	SET_PIXEL_CLOCK_PS_ALLOCATION base;
	PIXEL_CLOCK_PARAMETERS v1;
	PIXEL_CLOCK_PARAMETERS_V2 v2;
	PIXEL_CLOCK_PARAMETERS_V3 v3;
	PIXEL_CLOCK_PARAMETERS_V5 v5;
};

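/* Program the display clock PLL (DCPLL) to the default display clock; with
 * SetPixelClock v5 the table computes the dividers itself.
 */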
static void atombios_crtc_set_dcpll(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u8 frev, crev;
	int index;
	union set_pixel_clock args;

	memset(&args, 0, sizeof(args));

	index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
				   &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 5:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
			args.v5.ucCRTC = ATOM_CRTC_INVALID;
			args.v5.usPixelClock = cpu_to_le16(rdev->clock.default_dispclk);
			args.v5.ucPpll = ATOM_DCPLL;
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
		return;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

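/* Issue SetPixelClock with explicit divider values, handling the parameter
 * layout differences between table revisions v1-v5.
 */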
static void atombios_crtc_program_pll(struct drm_crtc *crtc,
				      int crtc_id,
				      int pll_id,
				      u32 encoder_mode,
				      u32 encoder_id,
				      u32 clock,
				      u32 ref_div,
				      u32 fb_div,
				      u32 frac_fb_div,
				      u32 post_div)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u8 frev, crev;
	int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
	union set_pixel_clock args;

	memset(&args, 0, sizeof(args));

	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
				   &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			if (clock == ATOM_DISABLE)
				return;
			args.v1.usPixelClock = cpu_to_le16(clock / 10);
			args.v1.usRefDiv = cpu_to_le16(ref_div);
			args.v1.usFbDiv = cpu_to_le16(fb_div);
			args.v1.ucFracFbDiv = frac_fb_div;
			args.v1.ucPostDiv = post_div;
			args.v1.ucPpll = pll_id;
			args.v1.ucCRTC = crtc_id;
			args.v1.ucRefDivSrc = 1;
			break;
		case 2:
			args.v2.usPixelClock = cpu_to_le16(clock / 10);
			args.v2.usRefDiv = cpu_to_le16(ref_div);
			args.v2.usFbDiv = cpu_to_le16(fb_div);
			args.v2.ucFracFbDiv = frac_fb_div;
			args.v2.ucPostDiv = post_div;
			args.v2.ucPpll = pll_id;
			args.v2.ucCRTC = crtc_id;
			args.v2.ucRefDivSrc = 1;
			break;
		case 3:
			args.v3.usPixelClock = cpu_to_le16(clock / 10);
			args.v3.usRefDiv = cpu_to_le16(ref_div);
			args.v3.usFbDiv = cpu_to_le16(fb_div);
			args.v3.ucFracFbDiv = frac_fb_div;
			args.v3.ucPostDiv = post_div;
			args.v3.ucPpll = pll_id;
			args.v3.ucMiscInfo = (pll_id << 2);
			args.v3.ucTransmitterId = encoder_id;
			args.v3.ucEncoderMode = encoder_mode;
			break;
		case 5:
			args.v5.ucCRTC = crtc_id;
			args.v5.usPixelClock = cpu_to_le16(clock / 10);
			args.v5.ucRefDiv = ref_div;
			args.v5.usFbDiv = cpu_to_le16(fb_div);
			args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
			args.v5.ucPostDiv = post_div;
			args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
			args.v5.ucTransmitterID = encoder_id;
			args.v5.ucEncoderMode = encoder_mode;
			args.v5.ucPpll = pll_id;
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
		return;
	}

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

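/* Compute dividers for the adjusted pixel clock with radeon_compute_pll()
 * and program the CRTC's PLL accordingly.
 */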
static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	u32 pll_clock = mode->clock;
	u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
	struct radeon_pll *pll;
	u32 adjusted_clock;
	int encoder_mode = 0;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			encoder_mode = atombios_get_encoder_mode(encoder);
			break;
		}
	}

	if (!radeon_encoder)
		return;

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
		pll = &rdev->clock.p1pll;
		break;
	case ATOM_PPLL2:
		pll = &rdev->clock.p2pll;
		break;
	case ATOM_DCPLL:
	case ATOM_PPLL_INVALID:
	default:
		pll = &rdev->clock.dcpll;
		break;
	}

	/* adjust pixel clock as needed */
	adjusted_clock = atombios_adjust_pll(crtc, mode, pll);

	radeon_compute_pll(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
			   &ref_div, &post_div);

	atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
				  encoder_mode, radeon_encoder->encoder_id, mode->clock,
				  ref_div, fb_div, frac_fb_div, post_div);
}

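/* DCE4 (evergreen) scanout setup: pin the framebuffer in VRAM, pick the
 * matching GRPH format/tiling and program the surface address, pitch and
 * viewport registers.
 */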
static int evergreen_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				   struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	int r;

	/* no fb bound */
	if (!crtc->fb) {
		DRM_DEBUG_KMS("No FB bound\n");
		return 0;
	}

	radeon_fb = to_radeon_framebuffer(crtc->fb);

	/* Pin framebuffer & get tiling information */
	obj = radeon_fb->obj;
	rbo = obj->driver_private;
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		return -EINVAL;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	switch (crtc->fb->bits_per_pixel) {
	case 8:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
		break;
	case 15:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
		break;
	case 16:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
		break;
	case 24:
	case 32:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
		break;
	default:
		DRM_ERROR("Unsupported screen depth %d\n",
			  crtc->fb->bits_per_pixel);
		return -EINVAL;
	}

	if (tiling_flags & RADEON_TILING_MACRO)
		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
	else if (tiling_flags & RADEON_TILING_MICRO)
		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);

	switch (radeon_crtc->crtc_id) {
	case 0:
		WREG32(AVIVO_D1VGA_CONTROL, 0);
		break;
	case 1:
		WREG32(AVIVO_D2VGA_CONTROL, 0);
		break;
	case 2:
		WREG32(EVERGREEN_D3VGA_CONTROL, 0);
		break;
	case 3:
		WREG32(EVERGREEN_D4VGA_CONTROL, 0);
		break;
	case 4:
		WREG32(EVERGREEN_D5VGA_CONTROL, 0);
		break;
	case 5:
		WREG32(EVERGREEN_D6VGA_CONTROL, 0);
		break;
	default:
		break;
	}

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
	WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

	fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
	WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       crtc->mode.vdisplay);
	x &= ~3;
	y &= ~1;
	WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
		WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset,
		       EVERGREEN_INTERLEAVE_EN);
	else
		WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

	if (old_fb && old_fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(old_fb);
		rbo = radeon_fb->obj->driver_private;
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}

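/* Pre-DCE4 (AVIVO) version of the scanout setup above, using the
 * D1GRPH/D2GRPH register set.
 */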
static int avivo_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			       struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	int r;

	/* no fb bound */
	if (!crtc->fb) {
		DRM_DEBUG_KMS("No FB bound\n");
		return 0;
	}

	radeon_fb = to_radeon_framebuffer(crtc->fb);

	/* Pin framebuffer & get tiling information */
	obj = radeon_fb->obj;
	rbo = obj->driver_private;
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		return -EINVAL;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	switch (crtc->fb->bits_per_pixel) {
	case 8:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
		    AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
		break;
	case 15:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
		break;
	case 16:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
		break;
	case 24:
	case 32:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
		    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
		break;
	default:
		DRM_ERROR("Unsupported screen depth %d\n",
			  crtc->fb->bits_per_pixel);
		return -EINVAL;
	}

	if (rdev->family >= CHIP_R600) {
		if (tiling_flags & RADEON_TILING_MACRO)
			fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
		else if (tiling_flags & RADEON_TILING_MICRO)
			fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
	} else {
		if (tiling_flags & RADEON_TILING_MACRO)
			fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;

		if (tiling_flags & RADEON_TILING_MICRO)
			fb_format |= AVIVO_D1GRPH_TILED;
	}

	if (radeon_crtc->crtc_id == 0)
		WREG32(AVIVO_D1VGA_CONTROL, 0);
	else
		WREG32(AVIVO_D2VGA_CONTROL, 0);

	if (rdev->family >= CHIP_RV770) {
		if (radeon_crtc->crtc_id) {
			WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		} else {
			WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		}
	}
	WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location);
	WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
	       radeon_crtc->crtc_offset, (u32) fb_location);
	WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
	WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

	fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
	WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       crtc->mode.vdisplay);
	x &= ~3;
	y &= ~1;
	WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
		       AVIVO_D1MODE_INTERLEAVE_EN);
	else
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

	if (old_fb && old_fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(old_fb);
		rbo = radeon_fb->obj->driver_private;
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}

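/* Route set_base to the DCE4, AVIVO or legacy implementation. */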
int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev))
		return evergreen_crtc_set_base(crtc, x, y, old_fb);
	else if (ASIC_IS_AVIVO(rdev))
		return avivo_crtc_set_base(crtc, x, y, old_fb);
	else
		return radeon_crtc_set_base(crtc, x, y, old_fb);
}

/* properly set additional regs when using atombios */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	switch (radeon_crtc->crtc_id) {
	case 0:
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}

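/* Pick a PPLL for this CRTC.  On DCE4, prefer the external clock for DP
 * when available, otherwise grab a PPLL not used by another CRTC; older
 * ASICs have a fixed CRTC->PLL mapping.
 */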
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;

	if (ASIC_IS_DCE4(rdev)) {
		/* if crtc is driving DP and we have an ext clock, use that */
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		return radeon_crtc->crtc_id;
}

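/* Full modeset: program spread spectrum, the PLLs, CRTC timing, scanout
 * base, overscan and scaler for the new mode.
 */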
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* TODO color tiling */

	atombios_disable_ss(crtc);
	/* always set DCPLL */
	if (ASIC_IS_DCE4(rdev))
		atombios_crtc_set_dcpll(crtc);
	atombios_crtc_set_pll(crtc, adjusted_mode);
	atombios_enable_ss(crtc);

	if (ASIC_IS_AVIVO(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	return 0;
}

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* adjust pm to upcoming mode change */
	radeon_pm_compute_clocks(rdev);

	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	return true;
}

static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

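/* Power the CRTC down and disable the PPLL it was using. */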
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		/* disable the ppll */
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0);
		break;
	default:
		break;
	}
	radeon_crtc->pll_id = -1;
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
	.disable = atombios_crtc_disable,
};

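/* Set the per-CRTC register offset for this ASIC family and hook up the
 * atombios CRTC helper functions.
 */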
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = -1;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}