/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include <drm/drm_fixed.h>
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"

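/* set up the overscan borders the CRTC adds around the active mode when rmx scaling is in use */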
static void atombios_overscan_setup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
	int a1, a2;

	memset(&args, 0, sizeof(args));

	args.usOverscanRight = 0;
	args.usOverscanLeft = 0;
	args.usOverscanBottom = 0;
	args.usOverscanTop = 0;
	args.ucCRTC = radeon_crtc->crtc_id;

	switch (radeon_crtc->rmx_type) {
	case RMX_CENTER:
		args.usOverscanTop = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
		args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2;
		args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
		args.usOverscanRight = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2;
		atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
		break;
	case RMX_ASPECT:
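		/* compare the two aspect ratios by cross-multiplying and pad the axis that has slack */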
		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;

		if (a1 > a2) {
			args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
			args.usOverscanRight = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2;
		} else if (a2 > a1) {
			args.usOverscanTop = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
			args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2;
		}
		atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
		break;
	case RMX_FULL:
	default:
		args.usOverscanRight = 0;
		args.usOverscanLeft = 0;
		args.usOverscanBottom = 0;
		args.usOverscanTop = 0;
		atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
		break;
	}
}

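/* program the CRTC scaler: TV/CV standards get the multi-tap scaler, otherwise follow the rmx type */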
static void atombios_scaler_setup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	ENABLE_SCALER_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
	/* fixme - fill in enc_priv for atom dac */
	enum radeon_tv_std tv_std = TV_STD_NTSC;
	bool is_tv = false, is_cv = false;
	struct drm_encoder *encoder;

	if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
		return;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* find tv std */
		if (encoder->crtc == crtc) {
			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
			if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
				struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
				tv_std = tv_dac->tv_std;
				is_tv = true;
			}
		}
	}

	memset(&args, 0, sizeof(args));

	args.ucScaler = radeon_crtc->crtc_id;

	if (is_tv) {
		switch (tv_std) {
		case TV_STD_NTSC:
		default:
			args.ucTVStandard = ATOM_TV_NTSC;
			break;
		case TV_STD_PAL:
			args.ucTVStandard = ATOM_TV_PAL;
			break;
		case TV_STD_PAL_M:
			args.ucTVStandard = ATOM_TV_PALM;
			break;
		case TV_STD_PAL_60:
			args.ucTVStandard = ATOM_TV_PAL60;
			break;
		case TV_STD_NTSC_J:
			args.ucTVStandard = ATOM_TV_NTSCJ;
			break;
		case TV_STD_SCART_PAL:
			args.ucTVStandard = ATOM_TV_PAL; /* ??? */
			break;
		case TV_STD_SECAM:
			args.ucTVStandard = ATOM_TV_SECAM;
			break;
		case TV_STD_PAL_CN:
			args.ucTVStandard = ATOM_TV_PALCN;
			break;
		}
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else if (is_cv) {
		args.ucTVStandard = ATOM_TV_CV;
		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
	} else {
		switch (radeon_crtc->rmx_type) {
		case RMX_FULL:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		case RMX_CENTER:
			args.ucEnable = ATOM_SCALER_CENTER;
			break;
		case RMX_ASPECT:
			args.ucEnable = ATOM_SCALER_EXPANSION;
			break;
		default:
			if (ASIC_IS_AVIVO(rdev))
				args.ucEnable = ATOM_SCALER_DISABLE;
			else
				args.ucEnable = ATOM_SCALER_CENTER;
			break;
		}
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	if ((is_tv || is_cv)
	    && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
		atom_rv515_force_tv_scaler(rdev, radeon_crtc);
	}
}

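/* lock or unlock the CRTC's double-buffered registers while it is being reprogrammed */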
static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index =
	    GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = lock;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
	ENABLE_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucEnable = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
	BLANK_CRTC_PS_ALLOCATION args;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;
	args.ucBlanking = state;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	switch (mode) {
	case DRM_MODE_DPMS_ON:
		radeon_crtc->enabled = true;
		/* adjust pm to dpms changes BEFORE enabling crtcs */
		radeon_pm_compute_clocks(rdev);
		atombios_enable_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
		atombios_blank_crtc(crtc, ATOM_DISABLE);
		drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
		radeon_crtc_load_lut(crtc);
		break;
	case DRM_MODE_DPMS_STANDBY:
	case DRM_MODE_DPMS_SUSPEND:
	case DRM_MODE_DPMS_OFF:
		drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
		atombios_blank_crtc(crtc, ATOM_ENABLE);
		if (ASIC_IS_DCE3(rdev))
			atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
		atombios_enable_crtc(crtc, ATOM_DISABLE);
		radeon_crtc->enabled = false;
		/* adjust pm to dpms changes AFTER disabling crtcs */
		radeon_pm_compute_clocks(rdev);
		break;
	}
}

static void
atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
			     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Size = cpu_to_le16(mode->crtc_hdisplay);
	args.usH_Blanking_Time =
		cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay);
	args.usV_Size = cpu_to_le16(mode->crtc_vdisplay);
	args.usV_Blanking_Time =
	    cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay);
	args.usH_SyncOffset =
		cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_SyncOffset =
		cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
	/*args.ucH_Border = mode->hborder;*/
	/*args.ucV_Border = mode->vborder;*/

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_DOUBLE_CLOCK_MODE;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

static void atombios_crtc_set_timing(struct drm_crtc *crtc,
				     struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
	u16 misc = 0;

	memset(&args, 0, sizeof(args));
	args.usH_Total = cpu_to_le16(mode->crtc_htotal);
	args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
	args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
	args.usH_SyncWidth =
		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
	args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
	args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
	args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
	args.usV_SyncWidth =
		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		misc |= ATOM_VSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		misc |= ATOM_HSYNC_POLARITY;
	if (mode->flags & DRM_MODE_FLAG_CSYNC)
		misc |= ATOM_COMPOSITESYNC;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		misc |= ATOM_INTERLACE;
	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
		misc |= ATOM_DOUBLE_CLOCK_MODE;

	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
	args.ucCRTC = radeon_crtc->crtc_id;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

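/* disable spread spectrum on the pixel PLL feeding this CRTC (direct register writes, no atom table) */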
static void atombios_disable_ss(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u32 ss_cntl;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
			WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		switch (radeon_crtc->pll_id) {
		case ATOM_PPLL1:
			ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_PPLL2:
			ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
			ss_cntl &= ~1;
			WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
			break;
		case ATOM_DCPLL:
		case ATOM_PPLL_INVALID:
			return;
		}
	}
}


union atom_enable_ss {
	ENABLE_LVDS_SS_PARAMETERS legacy;
	ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
};

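/* enable spread spectrum on the pixel PLL via the EnableSpreadSpectrumOnPPLL table (LVDS only here) */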
static void atombios_enable_ss(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	struct radeon_encoder_atom_dig *dig = NULL;
	int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
	union atom_enable_ss args;
	uint16_t percentage = 0;
	uint8_t type = 0, step = 0, delay = 0, range = 0;

	/* XXX add ss support for DCE4 */
	if (ASIC_IS_DCE4(rdev))
		return;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			/* only enable spread spectrum on LVDS */
			if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
				dig = radeon_encoder->enc_priv;
				if (dig && dig->ss) {
					percentage = dig->ss->percentage;
					type = dig->ss->type;
					step = dig->ss->step;
					delay = dig->ss->delay;
					range = dig->ss->range;
				} else
					return;
			} else
				return;
			break;
		}
	}

	if (!radeon_encoder)
		return;

	memset(&args, 0, sizeof(args));
	if (ASIC_IS_AVIVO(rdev)) {
		args.v1.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
		args.v1.ucSpreadSpectrumType = type;
		args.v1.ucSpreadSpectrumStep = step;
		args.v1.ucSpreadSpectrumDelay = delay;
		args.v1.ucSpreadSpectrumRange = range;
		args.v1.ucPpll = radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
		args.v1.ucEnable = ATOM_ENABLE;
	} else {
		args.legacy.usSpreadSpectrumPercentage = cpu_to_le16(percentage);
		args.legacy.ucSpreadSpectrumType = type;
		args.legacy.ucSpreadSpectrumStepSize_Delay = (step & 3) << 2;
		args.legacy.ucSpreadSpectrumStepSize_Delay |= (delay & 7) << 4;
		args.legacy.ucEnable = ATOM_ENABLE;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

union adjust_pixel_clock {
	ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
	ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
};

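/* adjust the requested pixel clock and pll flags/algo to match the encoder's requirements */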
static u32 atombios_adjust_pll(struct drm_crtc *crtc,
			       struct drm_display_mode *mode,
			       struct radeon_pll *pll)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	u32 adjusted_clock = mode->clock;
	int encoder_mode = 0;

	/* reset the pll flags */
	pll->flags = 0;

	/* select the PLL algo */
	if (ASIC_IS_AVIVO(rdev)) {
		if (radeon_new_pll == 0)
			pll->algo = PLL_ALGO_LEGACY;
		else
			pll->algo = PLL_ALGO_NEW;
	} else {
		if (radeon_new_pll == 1)
			pll->algo = PLL_ALGO_NEW;
		else
			pll->algo = PLL_ALGO_LEGACY;
	}

	if (ASIC_IS_AVIVO(rdev)) {
		if ((rdev->family == CHIP_RS600) ||
		    (rdev->family == CHIP_RS690) ||
		    (rdev->family == CHIP_RS740))
			pll->flags |= (RADEON_PLL_USE_FRAC_FB_DIV |
				       RADEON_PLL_PREFER_CLOSEST_LOWER);

		if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)	/* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
	} else {
		pll->flags |= RADEON_PLL_LEGACY;

		if (mode->clock > 200000)	/* range limits??? */
			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
		else
			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;

	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			encoder_mode = atombios_get_encoder_mode(encoder);
			if (ASIC_IS_AVIVO(rdev)) {
				/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
				if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
					adjusted_clock = mode->clock * 2;
				if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) {
					pll->algo = PLL_ALGO_LEGACY;
					pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
				}
			} else {
				if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
					pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
				if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
					pll->flags |= RADEON_PLL_USE_REF_DIV;
			}
			break;
		}
	}

	/* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
	 * accordingly based on the encoder/transmitter to work around
	 * special hw requirements.
	 */
	if (ASIC_IS_DCE3(rdev)) {
		union adjust_pixel_clock args;
		u8 frev, crev;
		int index;

		index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
		if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
					   &crev))
			return adjusted_clock;

		memset(&args, 0, sizeof(args));

		switch (frev) {
		case 1:
			switch (crev) {
			case 1:
			case 2:
				args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v1.ucTransmitterID = radeon_encoder->encoder_id;
				args.v1.ucEncodeMode = encoder_mode;

				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
				break;
			case 3:
				args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
				args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
				args.v3.sInput.ucEncodeMode = encoder_mode;
				args.v3.sInput.ucDispPllConfig = 0;
				if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
					struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;

					if (encoder_mode == ATOM_ENCODER_MODE_DP)
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_COHERENT_MODE;
					else {
						if (dig->coherent_mode)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_COHERENT_MODE;
						if (mode->clock > 165000)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_DUAL_LINK;
					}
				} else if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
					/* may want to enable SS on DP/eDP eventually */
					/*args.v3.sInput.ucDispPllConfig |=
						DISPPLL_CONFIG_SS_ENABLE;*/
					if (encoder_mode == ATOM_ENCODER_MODE_DP)
						args.v3.sInput.ucDispPllConfig |=
							DISPPLL_CONFIG_COHERENT_MODE;
					else {
						if (mode->clock > 165000)
							args.v3.sInput.ucDispPllConfig |=
								DISPPLL_CONFIG_DUAL_LINK;
					}
				}
				atom_execute_table(rdev->mode_info.atom_context,
						   index, (uint32_t *)&args);
				adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
				if (args.v3.sOutput.ucRefDiv) {
					pll->flags |= RADEON_PLL_USE_REF_DIV;
					pll->reference_div = args.v3.sOutput.ucRefDiv;
				}
				if (args.v3.sOutput.ucPostDiv) {
					pll->flags |= RADEON_PLL_USE_POST_DIV;
					pll->post_div = args.v3.sOutput.ucPostDiv;
				}
				break;
			default:
				DRM_ERROR("Unknown table version %d %d\n", frev, crev);
				return adjusted_clock;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return adjusted_clock;
		}
	}
	return adjusted_clock;
}

union set_pixel_clock {
	SET_PIXEL_CLOCK_PS_ALLOCATION base;
	PIXEL_CLOCK_PARAMETERS v1;
	PIXEL_CLOCK_PARAMETERS_V2 v2;
	PIXEL_CLOCK_PARAMETERS_V3 v3;
	PIXEL_CLOCK_PARAMETERS_V5 v5;
};

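/* program the display controller PLL (DCPLL) to the default display clock on DCE4 */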
static void atombios_crtc_set_dcpll(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	u8 frev, crev;
	int index;
	union set_pixel_clock args;

	memset(&args, 0, sizeof(args));

	index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
				   &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 5:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
			args.v5.ucCRTC = ATOM_CRTC_INVALID;
			args.v5.usPixelClock = cpu_to_le16(rdev->clock.default_dispclk);
			args.v5.ucPpll = ATOM_DCPLL;
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
		return;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

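/* compute dividers for the mode clock and program the CRTC's pixel PLL via SetPixelClock */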
static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder = NULL;
	struct radeon_encoder *radeon_encoder = NULL;
	u8 frev, crev;
	int index;
	union set_pixel_clock args;
	u32 pll_clock = mode->clock;
	u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
	struct radeon_pll *pll;
	u32 adjusted_clock;
	int encoder_mode = 0;

	memset(&args, 0, sizeof(args));

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_encoder = to_radeon_encoder(encoder);
			encoder_mode = atombios_get_encoder_mode(encoder);
			break;
		}
	}

	if (!radeon_encoder)
		return;

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
		pll = &rdev->clock.p1pll;
		break;
	case ATOM_PPLL2:
		pll = &rdev->clock.p2pll;
		break;
	case ATOM_DCPLL:
	case ATOM_PPLL_INVALID:
	default:
		pll = &rdev->clock.dcpll;
		break;
	}

	/* adjust pixel clock as needed */
	adjusted_clock = atombios_adjust_pll(crtc, mode, pll);

	radeon_compute_pll(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
			   &ref_div, &post_div);

	index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
				   &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
			args.v1.usRefDiv = cpu_to_le16(ref_div);
			args.v1.usFbDiv = cpu_to_le16(fb_div);
			args.v1.ucFracFbDiv = frac_fb_div;
			args.v1.ucPostDiv = post_div;
			args.v1.ucPpll = radeon_crtc->pll_id;
			args.v1.ucCRTC = radeon_crtc->crtc_id;
			args.v1.ucRefDivSrc = 1;
			break;
		case 2:
			args.v2.usPixelClock = cpu_to_le16(mode->clock / 10);
			args.v2.usRefDiv = cpu_to_le16(ref_div);
			args.v2.usFbDiv = cpu_to_le16(fb_div);
			args.v2.ucFracFbDiv = frac_fb_div;
			args.v2.ucPostDiv = post_div;
			args.v2.ucPpll = radeon_crtc->pll_id;
			args.v2.ucCRTC = radeon_crtc->crtc_id;
			args.v2.ucRefDivSrc = 1;
			break;
		case 3:
			args.v3.usPixelClock = cpu_to_le16(mode->clock / 10);
			args.v3.usRefDiv = cpu_to_le16(ref_div);
			args.v3.usFbDiv = cpu_to_le16(fb_div);
			args.v3.ucFracFbDiv = frac_fb_div;
			args.v3.ucPostDiv = post_div;
			args.v3.ucPpll = radeon_crtc->pll_id;
			args.v3.ucMiscInfo = (radeon_crtc->pll_id << 2);
			args.v3.ucTransmitterId = radeon_encoder->encoder_id;
			args.v3.ucEncoderMode = encoder_mode;
			break;
		case 5:
			args.v5.ucCRTC = radeon_crtc->crtc_id;
			args.v5.usPixelClock = cpu_to_le16(mode->clock / 10);
			args.v5.ucRefDiv = ref_div;
			args.v5.usFbDiv = cpu_to_le16(fb_div);
			args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
			args.v5.ucPostDiv = post_div;
			args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
			args.v5.ucTransmitterID = radeon_encoder->encoder_id;
			args.v5.ucEncoderMode = encoder_mode;
			args.v5.ucPpll = radeon_crtc->pll_id;
			break;
		default:
			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
		return;
	}

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

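/* pin the framebuffer in VRAM and program the DCE4 (Evergreen) GRPH surface/viewport registers */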
static int evergreen_crtc_set_base(struct drm_crtc *crtc, int x, int y,
				   struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	int r;

	/* no fb bound */
	if (!crtc->fb) {
		DRM_DEBUG("No FB bound\n");
		return 0;
	}

	radeon_fb = to_radeon_framebuffer(crtc->fb);

	/* Pin framebuffer & get tiling information */
	obj = radeon_fb->obj;
	rbo = obj->driver_private;
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		return -EINVAL;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	switch (crtc->fb->bits_per_pixel) {
	case 8:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
		break;
	case 15:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
		break;
	case 16:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
		break;
	case 24:
	case 32:
		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
		break;
	default:
		DRM_ERROR("Unsupported screen depth %d\n",
			  crtc->fb->bits_per_pixel);
		return -EINVAL;
	}

	switch (radeon_crtc->crtc_id) {
	case 0:
		WREG32(AVIVO_D1VGA_CONTROL, 0);
		break;
	case 1:
		WREG32(AVIVO_D2VGA_CONTROL, 0);
		break;
	case 2:
		WREG32(EVERGREEN_D3VGA_CONTROL, 0);
		break;
	case 3:
		WREG32(EVERGREEN_D4VGA_CONTROL, 0);
		break;
	case 4:
		WREG32(EVERGREEN_D5VGA_CONTROL, 0);
		break;
	case 5:
		WREG32(EVERGREEN_D6VGA_CONTROL, 0);
		break;
	default:
		break;
	}

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(fb_location));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
	WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
	WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

	fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
	WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       crtc->mode.vdisplay);
	x &= ~3;
	y &= ~1;
	WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
		WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset,
		       EVERGREEN_INTERLEAVE_EN);
	else
		WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

	if (old_fb && old_fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(old_fb);
		rbo = radeon_fb->obj->driver_private;
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}

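/* pin the framebuffer in VRAM and program the AVIVO D1GRPH surface/viewport registers (offset selects the CRTC) */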
static int avivo_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			       struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	int r;

	/* no fb bound */
	if (!crtc->fb) {
		DRM_DEBUG("No FB bound\n");
		return 0;
	}
937
938
939

	radeon_fb = to_radeon_framebuffer(crtc->fb);

	/* Pin framebuffer & get tiling information */
	obj = radeon_fb->obj;
	rbo = obj->driver_private;
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		return -EINVAL;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	switch (crtc->fb->bits_per_pixel) {
	case 8:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
		    AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
		break;
	case 15:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
		break;
	case 16:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
		break;
	case 24:
	case 32:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
		    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
		break;
	default:
		DRM_ERROR("Unsupported screen depth %d\n",
			  crtc->fb->bits_per_pixel);
		return -EINVAL;
	}

	if (tiling_flags & RADEON_TILING_MACRO)
		fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;

	if (tiling_flags & RADEON_TILING_MICRO)
		fb_format |= AVIVO_D1GRPH_TILED;

	if (radeon_crtc->crtc_id == 0)
		WREG32(AVIVO_D1VGA_CONTROL, 0);
	else
		WREG32(AVIVO_D2VGA_CONTROL, 0);

	if (rdev->family >= CHIP_RV770) {
		if (radeon_crtc->crtc_id) {
			WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		} else {
			WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		}
	}
	WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location);
	WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
	       radeon_crtc->crtc_offset, (u32) fb_location);
	WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
	WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

	fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
	WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       crtc->mode.vdisplay);
	x &= ~3;
	y &= ~1;
	WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
		       AVIVO_D1MODE_INTERLEAVE_EN);
	else
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

	if (old_fb && old_fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(old_fb);
		rbo = radeon_fb->obj->driver_private;
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);

	return 0;
}

int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev))
		return evergreen_crtc_set_base(crtc, x, y, old_fb);
	else if (ASIC_IS_AVIVO(rdev))
		return avivo_crtc_set_base(crtc, x, y, old_fb);
	else
		return radeon_crtc_set_base(crtc, x, y, old_fb);
}

/* properly set additional regs when using atombios */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	switch (radeon_crtc->crtc_id) {
	case 0:
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		WREG32(RADEON_FP_H2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}

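/* pick a PPLL for this CRTC; on DCE4, DP with an external clock needs no PPLL, otherwise grab a free one */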
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;

	if (ASIC_IS_DCE4(rdev)) {
		/* if crtc is driving DP and we have an ext clock, use that */
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		return radeon_crtc->crtc_id;

}

int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* TODO color tiling */

	atombios_disable_ss(crtc);
	/* always set DCPLL */
	if (ASIC_IS_DCE4(rdev))
		atombios_crtc_set_dcpll(crtc);
	atombios_crtc_set_pll(crtc, adjusted_mode);
	atombios_enable_ss(crtc);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev))
		atombios_crtc_set_timing(crtc, adjusted_mode);
	else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);

	return 0;
}

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* adjust pm to upcoming mode change */
	radeon_pm_compute_clocks(rdev);

	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	return true;
}

static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
};

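/* set the per-CRTC register offset and hook up the atom CRTC helper functions */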
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = -1;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}