intel_dp.c (FreeBSD stable/10, revision 280369)
1/*
 2 * Copyright © 2008 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 *    Keith Packard <keithp@keithp.com>
25 *
26 */
27
28#include <sys/cdefs.h>
29__FBSDID("$FreeBSD: stable/10/sys/dev/drm2/i915/intel_dp.c 280369 2015-03-23 13:38:33Z kib $");
30
31#include <dev/drm2/drmP.h>
32#include <dev/drm2/drm.h>
33#include <dev/drm2/drm_crtc.h>
34#include <dev/drm2/drm_crtc_helper.h>
35#include <dev/drm2/i915/i915_drm.h>
36#include <dev/drm2/i915/i915_drv.h>
37#include <dev/drm2/i915/intel_drv.h>
38#include <dev/drm2/drm_dp_helper.h>
39
40#define DP_RECEIVER_CAP_SIZE	0xf
41#define DP_LINK_STATUS_SIZE	6
42#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)
43
44#define DP_LINK_CONFIGURATION_SIZE	9
45
46struct intel_dp {
47	struct intel_encoder base;
48	uint32_t output_reg;
49	uint32_t DP;
50	uint8_t  link_configuration[DP_LINK_CONFIGURATION_SIZE];
51	bool has_audio;
52	enum hdmi_force_audio force_audio;
53	uint32_t color_range;
54	int dpms_mode;
55	uint8_t link_bw;
56	uint8_t lane_count;
57	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
58	device_t dp_iic_bus;
59	device_t adapter;
60	bool is_pch_edp;
61	uint8_t	train_set[4];
62	int panel_power_up_delay;
63	int panel_power_down_delay;
64	int panel_power_cycle_delay;
65	int backlight_on_delay;
66	int backlight_off_delay;
67	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
68	struct timeout_task panel_vdd_task;
69	bool want_panel_vdd;
70};
71
72/**
73 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
74 * @intel_dp: DP struct
75 *
76 * If a CPU or PCH DP output is attached to an eDP panel, this function
77 * will return true, and false otherwise.
78 */
79static bool is_edp(struct intel_dp *intel_dp)
80{
81	return intel_dp->base.type == INTEL_OUTPUT_EDP;
82}
83
84/**
85 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
86 * @intel_dp: DP struct
87 *
88 * Returns true if the given DP struct corresponds to a PCH DP port attached
89 * to an eDP panel, false otherwise.  Helpful for determining whether we
90 * may need FDI resources for a given DP output or not.
91 */
92static bool is_pch_edp(struct intel_dp *intel_dp)
93{
94	return intel_dp->is_pch_edp;
95}
96
97/**
98 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
99 * @intel_dp: DP struct
100 *
101 * Returns true if the given DP struct corresponds to a CPU eDP port.
102 */
103static bool is_cpu_edp(struct intel_dp *intel_dp)
104{
105	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
106}
107
108static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
109{
110	return container_of(encoder, struct intel_dp, base.base);
111}
112
113static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
114{
115	return container_of(intel_attached_encoder(connector),
116			    struct intel_dp, base);
117}
118
119/**
120 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
121 * @encoder: DRM encoder
122 *
123 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
124 * by intel_display.c.
125 */
126bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
127{
128	struct intel_dp *intel_dp;
129
130	if (!encoder)
131		return false;
132
133	intel_dp = enc_to_intel_dp(encoder);
134
135	return is_pch_edp(intel_dp);
136}
137
138static void intel_dp_start_link_train(struct intel_dp *intel_dp);
139static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
140static void intel_dp_link_down(struct intel_dp *intel_dp);
141
142void
143intel_edp_link_config(struct intel_encoder *intel_encoder,
144		       int *lane_num, int *link_bw)
145{
146	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
147
148	*lane_num = intel_dp->lane_count;
149	if (intel_dp->link_bw == DP_LINK_BW_1_62)
150		*link_bw = 162000;
151	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
152		*link_bw = 270000;
153}
154
155static int
156intel_dp_max_lane_count(struct intel_dp *intel_dp)
157{
158	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
159	switch (max_lane_count) {
160	case 1: case 2: case 4:
161		break;
162	default:
163		max_lane_count = 4;
164	}
165	return max_lane_count;
166}
167
168static int
169intel_dp_max_link_bw(struct intel_dp *intel_dp)
170{
171	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
172
173	switch (max_link_bw) {
174	case DP_LINK_BW_1_62:
175	case DP_LINK_BW_2_7:
176		break;
177	default:
178		max_link_bw = DP_LINK_BW_1_62;
179		break;
180	}
181	return max_link_bw;
182}
183
184static int
185intel_dp_link_clock(uint8_t link_bw)
186{
187	if (link_bw == DP_LINK_BW_2_7)
188		return 270000;
189	else
190		return 162000;
191}
192
193/*
194 * The units on the numbers in the next two are... bizarre.  Examples will
195 * make it clearer; this one parallels an example in the eDP spec.
196 *
197 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
198 *
199 *     270000 * 1 * 8 / 10 == 216000
200 *
201 * The actual data capacity of that configuration is 2.16Gbit/s, so the
202 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
203 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
204 * 119000.  At 18bpp that's 2142000 kilobits per second.
205 *
206 * Thus the strange-looking division by 10 in intel_dp_link_required, to
207 * get the result in decakilobits instead of kilobits.
208 */
209
210static int
211intel_dp_link_required(int pixel_clock, int bpp)
212{
213	return (pixel_clock * bpp + 9) / 10;
214}
215
216static int
217intel_dp_max_data_rate(int max_link_clock, int max_lanes)
218{
219	return (max_link_clock * max_lanes * 8) / 10;
220}
221
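As a worked check of the units comment above, a minimal standalone sketch (the numbers are the example values from that comment, not live driver state) comparing a mode's bandwidth requirement against link capacity in decakilobits:

/* Hypothetical sketch, not driver code. */
#include <stdio.h>

static int link_required(int pixel_clock, int bpp)
{
        return (pixel_clock * bpp + 9) / 10;            /* kHz * bpp -> decakilobits/s */
}

static int max_data_rate(int max_link_clock, int max_lanes)
{
        return (max_link_clock * max_lanes * 8) / 10;   /* 8b/10b payload only */
}

int main(void)
{
        int mode_rate  = link_required(119000, 18);     /* 1680x1050R at 18 bpp */
        int link_avail = max_data_rate(270000, 1);      /* one 2.7 GHz lane */

        printf("mode %d, link %d -> %s\n", mode_rate, link_avail,
            mode_rate <= link_avail ? "fits" : "does not fit");
        return 0;                               /* mode 214200, link 216000 -> fits */
}
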
222static bool
223intel_dp_adjust_dithering(struct intel_dp *intel_dp,
224			  const struct drm_display_mode *mode,
225			  struct drm_display_mode *adjusted_mode)
226{
227	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
228	int max_lanes = intel_dp_max_lane_count(intel_dp);
229	int max_rate, mode_rate;
230
231	mode_rate = intel_dp_link_required(mode->clock, 24);
232	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
233
234	if (mode_rate > max_rate) {
235		mode_rate = intel_dp_link_required(mode->clock, 18);
236		if (mode_rate > max_rate)
237			return false;
238
239		if (adjusted_mode)
240			adjusted_mode->private_flags
241				|= INTEL_MODE_DP_FORCE_6BPC;
242
243		return true;
244	}
245
246	return true;
247}
248
249static int
250intel_dp_mode_valid(struct drm_connector *connector,
251		    struct drm_display_mode *mode)
252{
253	struct intel_dp *intel_dp = intel_attached_dp(connector);
254
255	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
256		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
257			return MODE_PANEL;
258
259		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
260			return MODE_PANEL;
261	}
262
263	if (!intel_dp_adjust_dithering(intel_dp, mode, NULL))
264		return MODE_CLOCK_HIGH;
265
266	if (mode->clock < 10000)
267		return MODE_CLOCK_LOW;
268
269	return MODE_OK;
270}
271
272static uint32_t
273pack_aux(uint8_t *src, int src_bytes)
274{
275	int	i;
276	uint32_t v = 0;
277
278	if (src_bytes > 4)
279		src_bytes = 4;
280	for (i = 0; i < src_bytes; i++)
281		v |= ((uint32_t) src[i]) << ((3-i) * 8);
282	return v;
283}
284
285static void
286unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
287{
288	int i;
289	if (dst_bytes > 4)
290		dst_bytes = 4;
291	for (i = 0; i < dst_bytes; i++)
292		dst[i] = src >> ((3-i) * 8);
293}
294
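A small standalone sketch (illustrative only) of the byte order pack_aux() produces: the first message byte lands in the most significant byte of the 32-bit AUX data register. The 0x90 command byte assumes the usual native-read encoding and mirrors the 15-byte receiver-capability read done later in this file.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

static uint32_t pack(const uint8_t *src, int n)
{
        uint32_t v = 0;
        int i;

        if (n > 4)
                n = 4;
        for (i = 0; i < n; i++)
                v |= (uint32_t)src[i] << ((3 - i) * 8); /* byte 0 -> bits 31:24 */
        return v;
}

int main(void)
{
        /* Native AUX read header: command, address hi, address lo, length - 1. */
        uint8_t msg[4] = { 0x90, 0x00, 0x00, 0x0e };

        printf("0x%08x\n", (unsigned)pack(msg, 4));     /* prints 0x9000000e */
        return 0;
}
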
295/* hrawclock is 1/4 the FSB frequency */
296static int
297intel_hrawclk(struct drm_device *dev)
298{
299	struct drm_i915_private *dev_priv = dev->dev_private;
300	uint32_t clkcfg;
301
302	clkcfg = I915_READ(CLKCFG);
303	switch (clkcfg & CLKCFG_FSB_MASK) {
304	case CLKCFG_FSB_400:
305		return 100;
306	case CLKCFG_FSB_533:
307		return 133;
308	case CLKCFG_FSB_667:
309		return 166;
310	case CLKCFG_FSB_800:
311		return 200;
312	case CLKCFG_FSB_1067:
313		return 266;
314	case CLKCFG_FSB_1333:
315		return 333;
316	/* these two are just a guess; one of them might be right */
317	case CLKCFG_FSB_1600:
318	case CLKCFG_FSB_1600_ALT:
319		return 400;
320	default:
321		return 133;
322	}
323}
324
325static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
326{
327	struct drm_device *dev = intel_dp->base.base.dev;
328	struct drm_i915_private *dev_priv = dev->dev_private;
329
330	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
331}
332
333static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
334{
335	struct drm_device *dev = intel_dp->base.base.dev;
336	struct drm_i915_private *dev_priv = dev->dev_private;
337
338	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
339}
340
341static void
342intel_dp_check_edp(struct intel_dp *intel_dp)
343{
344	struct drm_device *dev = intel_dp->base.base.dev;
345	struct drm_i915_private *dev_priv = dev->dev_private;
346
347	if (!is_edp(intel_dp))
348		return;
349	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
350		printf("eDP powered off while attempting aux channel communication.\n");
351		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
352			      I915_READ(PCH_PP_STATUS),
353			      I915_READ(PCH_PP_CONTROL));
354	}
355}
356
357static int
358intel_dp_aux_ch(struct intel_dp *intel_dp,
359		uint8_t *send, int send_bytes,
360		uint8_t *recv, int recv_size)
361{
362	uint32_t output_reg = intel_dp->output_reg;
363	struct drm_device *dev = intel_dp->base.base.dev;
364	struct drm_i915_private *dev_priv = dev->dev_private;
365	uint32_t ch_ctl = output_reg + 0x10;
366	uint32_t ch_data = ch_ctl + 4;
367	int i;
368	int recv_bytes;
369	uint32_t status;
370	uint32_t aux_clock_divider;
371	int try, precharge = 5;
372
373	intel_dp_check_edp(intel_dp);
374	/* The clock divider is based off the hrawclk,
375	 * and would like to run at 2MHz. So, take the
376	 * hrawclk value and divide by 2 and use that
377	 *
378	 * Note that PCH attached eDP panels should use a 125MHz input
379	 * clock divider.
380	 */
381	if (is_cpu_edp(intel_dp)) {
382		if (IS_GEN6(dev) || IS_GEN7(dev))
383			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
384		else
385			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
386	} else if (HAS_PCH_SPLIT(dev))
387		aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */
388	else
389		aux_clock_divider = intel_hrawclk(dev) / 2;
390
391	/* Try to wait for any previous AUX channel activity */
392	for (try = 0; try < 3; try++) {
393		status = I915_READ(ch_ctl);
394		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
395			break;
396		drm_msleep(1, "915ach");
397	}
398
399	if (try == 3) {
400		printf("dp_aux_ch not started status 0x%08x\n",
401		     I915_READ(ch_ctl));
402		return -EBUSY;
403	}
404
405	/* Must try at least 3 times according to DP spec */
406	for (try = 0; try < 5; try++) {
407		/* Load the send data into the aux channel data registers */
408		for (i = 0; i < send_bytes; i += 4)
409			I915_WRITE(ch_data + i,
410				   pack_aux(send + i, send_bytes - i));
411
412		/* Send the command and wait for it to complete */
413		I915_WRITE(ch_ctl,
414			   DP_AUX_CH_CTL_SEND_BUSY |
415			   DP_AUX_CH_CTL_TIME_OUT_400us |
416			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
417			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
418			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
419			   DP_AUX_CH_CTL_DONE |
420			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
421			   DP_AUX_CH_CTL_RECEIVE_ERROR);
422		for (;;) {
423			status = I915_READ(ch_ctl);
424			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
425				break;
426			DELAY(100);
427		}
428
429		/* Clear done status and any errors */
430		I915_WRITE(ch_ctl,
431			   status |
432			   DP_AUX_CH_CTL_DONE |
433			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
434			   DP_AUX_CH_CTL_RECEIVE_ERROR);
435
436		if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR |
437			      DP_AUX_CH_CTL_RECEIVE_ERROR))
438			continue;
439		if (status & DP_AUX_CH_CTL_DONE)
440			break;
441	}
442
443	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
444		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
445		return -EBUSY;
446	}
447
448	/* Check for timeout or receive error.
449	 * Timeouts occur when the sink is not connected
450	 */
451	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
452		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
453		return -EIO;
454	}
455
456	/* Timeouts occur when the device isn't connected, so they're
457	 * "normal" -- don't fill the kernel log with these */
458	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
459		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
460		return -ETIMEDOUT;
461	}
462
463	/* Unload any bytes sent back from the other side */
464	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
465		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
466	if (recv_bytes > recv_size)
467		recv_bytes = recv_size;
468
469	for (i = 0; i < recv_bytes; i += 4)
470		unpack_aux(I915_READ(ch_data + i),
471			   recv + i, recv_bytes - i);
472
473	return recv_bytes;
474}
475
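A standalone sketch (assuming simple round-half-up arithmetic) of where the 200, 225 and 63 divider constants above come from: the AUX bit clock is meant to tick at roughly 2 MHz, so each divider is just the input clock divided by that target.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>

int main(void)
{
        int inputs_mhz[] = { 400, 450, 125 };   /* SNB/IVB CPU eDP, older CPU eDP, PCH */
        int i;

        for (i = 0; i < 3; i++)
                printf("%3d MHz -> divider %d\n",
                    inputs_mhz[i], (inputs_mhz[i] + 1) / 2);
        return 0;       /* prints 200, 225 and 63, matching the constants above */
}
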
476/* Write data to the aux channel in native mode */
477static int
478intel_dp_aux_native_write(struct intel_dp *intel_dp,
479			  uint16_t address, uint8_t *send, int send_bytes)
480{
481	int ret;
482	uint8_t	msg[20];
483	int msg_bytes;
484	uint8_t	ack;
485
486	intel_dp_check_edp(intel_dp);
487	if (send_bytes > 16)
488		return -1;
489	msg[0] = AUX_NATIVE_WRITE << 4;
490	msg[1] = address >> 8;
491	msg[2] = address & 0xff;
492	msg[3] = send_bytes - 1;
493	memcpy(&msg[4], send, send_bytes);
494	msg_bytes = send_bytes + 4;
495	for (;;) {
496		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
497		if (ret < 0)
498			return ret;
499		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
500			break;
501		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
502			DELAY(100);
503		else
504			return -EIO;
505	}
506	return send_bytes;
507}
508
509/* Write a single byte to the aux channel in native mode */
510static int
511intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
512			    uint16_t address, uint8_t byte)
513{
514	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
515}
516
517/* read bytes from a native aux channel */
518static int
519intel_dp_aux_native_read(struct intel_dp *intel_dp,
520			 uint16_t address, uint8_t *recv, int recv_bytes)
521{
522	uint8_t msg[4];
523	int msg_bytes;
524	uint8_t reply[20];
525	int reply_bytes;
526	uint8_t ack;
527	int ret;
528
529	intel_dp_check_edp(intel_dp);
530	msg[0] = AUX_NATIVE_READ << 4;
531	msg[1] = address >> 8;
532	msg[2] = address & 0xff;
533	msg[3] = recv_bytes - 1;
534
535	msg_bytes = 4;
536	reply_bytes = recv_bytes + 1;
537
538	for (;;) {
539		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
540				      reply, reply_bytes);
541		if (ret == 0)
542			return -EPROTO;
543		if (ret < 0)
544			return ret;
545		ack = reply[0];
546		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
547			memcpy(recv, reply + 1, ret - 1);
548			return ret - 1;
549		}
550		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
551			DELAY(100);
552		else
553			return -EIO;
554	}
555}
556
557static int
558intel_dp_i2c_aux_ch(device_t idev, int mode, uint8_t write_byte,
559    uint8_t *read_byte)
560{
561	struct iic_dp_aux_data *data;
562	struct intel_dp *intel_dp;
563	uint16_t address;
564	uint8_t msg[5];
565	uint8_t reply[2];
566	unsigned retry;
567	int msg_bytes;
568	int reply_bytes;
569	int ret;
570
571	data = device_get_softc(idev);
572	intel_dp = data->priv;
573	address = data->address;
574
575	intel_dp_check_edp(intel_dp);
576	/* Set up the command byte */
577	if (mode & MODE_I2C_READ)
578		msg[0] = AUX_I2C_READ << 4;
579	else
580		msg[0] = AUX_I2C_WRITE << 4;
581
582	if (!(mode & MODE_I2C_STOP))
583		msg[0] |= AUX_I2C_MOT << 4;
584
585	msg[1] = address >> 8;
586	msg[2] = address;
587
588	switch (mode) {
589	case MODE_I2C_WRITE:
590		msg[3] = 0;
591		msg[4] = write_byte;
592		msg_bytes = 5;
593		reply_bytes = 1;
594		break;
595	case MODE_I2C_READ:
596		msg[3] = 0;
597		msg_bytes = 4;
598		reply_bytes = 2;
599		break;
600	default:
601		msg_bytes = 3;
602		reply_bytes = 1;
603		break;
604	}
605
606	for (retry = 0; retry < 5; retry++) {
607		ret = intel_dp_aux_ch(intel_dp,
608				      msg, msg_bytes,
609				      reply, reply_bytes);
610		if (ret < 0) {
611			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
612			return (ret);
613		}
614
615		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
616		case AUX_NATIVE_REPLY_ACK:
617			/* I2C-over-AUX Reply field is only valid
618			 * when paired with AUX ACK.
619			 */
620			break;
621		case AUX_NATIVE_REPLY_NACK:
622			DRM_DEBUG_KMS("aux_ch native nack\n");
623			return (-EREMOTEIO);
624		case AUX_NATIVE_REPLY_DEFER:
625			DELAY(100);
626			continue;
627		default:
628			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
629				  reply[0]);
630			return (-EREMOTEIO);
631		}
632
633		switch (reply[0] & AUX_I2C_REPLY_MASK) {
634		case AUX_I2C_REPLY_ACK:
635			if (mode == MODE_I2C_READ) {
636				*read_byte = reply[1];
637			}
638			return (0/*reply_bytes - 1*/);
639		case AUX_I2C_REPLY_NACK:
640			DRM_DEBUG_KMS("aux_i2c nack\n");
641			return (-EREMOTEIO);
642		case AUX_I2C_REPLY_DEFER:
643			DRM_DEBUG_KMS("aux_i2c defer\n");
644			DELAY(100);
645			break;
646		default:
647			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
648			return (-EREMOTEIO);
649		}
650	}
651
652	DRM_ERROR("too many retries, giving up\n");
653	return (-EREMOTEIO);
654}
655
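A standalone sketch of the command byte assembled above for an I2C-over-AUX transfer that is not the last one in a transaction (MOT set). The command values are assumptions matching the usual DP AUX encoding (i2c read = 0x1, MOT = 0x4), and the 0x50 address is the conventional EDID slave address, used here only as an example.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

#define I2C_READ        0x1     /* assumed AUX command encodings */
#define I2C_MOT         0x4

int main(void)
{
        uint16_t address = 0x50;        /* example: EDID i2c address */
        uint8_t msg[4];

        msg[0] = (I2C_READ | I2C_MOT) << 4;     /* 0x50: read, middle of transaction */
        msg[1] = address >> 8;                  /* 0x00 */
        msg[2] = address;                       /* 0x50 */
        msg[3] = 0;                             /* length - 1: a single byte */
        printf("%02x %02x %02x %02x\n", msg[0], msg[1], msg[2], msg[3]);
        return 0;
}
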
656static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
657static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
658
659static int
660intel_dp_i2c_init(struct intel_dp *intel_dp,
661		  struct intel_connector *intel_connector, const char *name)
662{
663	int ret;
664
665	DRM_DEBUG_KMS("i2c_init %s\n", name);
666
667	ironlake_edp_panel_vdd_on(intel_dp);
668	ret = iic_dp_aux_add_bus(intel_connector->base.dev->device, name,
669	    intel_dp_i2c_aux_ch, intel_dp, &intel_dp->dp_iic_bus,
670	    &intel_dp->adapter);
671	ironlake_edp_panel_vdd_off(intel_dp, false);
672	return (ret);
673}
674
675static bool
676intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
677		    struct drm_display_mode *adjusted_mode)
678{
679	struct drm_device *dev = encoder->dev;
680	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
681	int lane_count, clock;
682	int max_lane_count = intel_dp_max_lane_count(intel_dp);
683	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
684	int bpp, mode_rate;
685	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
686
687	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
688		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
689		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
690					mode, adjusted_mode);
691		/*
692		 * the mode->clock is used to calculate the Data&Link M/N
693		 * of the pipe. For the eDP the fixed clock should be used.
694		 */
695		mode->clock = intel_dp->panel_fixed_mode->clock;
696	}
697
698	DRM_DEBUG_KMS("DP link computation with max lane count %i "
699		      "max bw %02x pixel clock %iKHz\n",
700		      max_lane_count, bws[max_clock], mode->clock);
701
702	if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, adjusted_mode))
703		return false;
704
705	bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;
706	mode_rate = intel_dp_link_required(mode->clock, bpp);
707
708	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
709		for (clock = 0; clock <= max_clock; clock++) {
710			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
711
712			if (mode_rate <= link_avail) {
713				intel_dp->link_bw = bws[clock];
714				intel_dp->lane_count = lane_count;
715				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
716				DRM_DEBUG_KMS("DP link bw %02x lane "
717						"count %d clock %d bpp %d\n",
718				       intel_dp->link_bw, intel_dp->lane_count,
719				       adjusted_mode->clock, bpp);
720				DRM_DEBUG_KMS("DP link bw required %i available %i\n",
721					      mode_rate, link_avail);
722				return true;
723			}
724		}
725	}
726
727	return false;
728}
729
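A standalone sketch of the search order used by intel_dp_mode_fixup() above: lane count grows 1, 2, 4 in the outer loop and bandwidth 1.62/2.7 GHz in the inner one, so the first configuration that carries the mode is also the cheapest. The 148.5 MHz / 24 bpp mode is an assumed example (a typical 1920x1080@60 timing).

/* Hypothetical sketch, not driver code. */
#include <stdio.h>

static int max_data_rate(int link_clock, int lanes)
{
        return (link_clock * lanes * 8) / 10;
}

int main(void)
{
        int bws[2] = { 162000, 270000 };
        int mode_rate = (148500 * 24 + 9) / 10;
        int lanes, clock;

        for (lanes = 1; lanes <= 4; lanes <<= 1)
                for (clock = 0; clock < 2; clock++)
                        if (mode_rate <= max_data_rate(bws[clock], lanes)) {
                                printf("use %d lane(s) at %d kHz\n",
                                    lanes, bws[clock]);     /* 2 lanes at 270000 kHz */
                                return 0;
                        }
        printf("mode does not fit on this link\n");
        return 0;
}
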
730struct intel_dp_m_n {
731	uint32_t	tu;
732	uint32_t	gmch_m;
733	uint32_t	gmch_n;
734	uint32_t	link_m;
735	uint32_t	link_n;
736};
737
738static void
739intel_reduce_ratio(uint32_t *num, uint32_t *den)
740{
741	while (*num > 0xffffff || *den > 0xffffff) {
742		*num >>= 1;
743		*den >>= 1;
744	}
745}
746
747static void
748intel_dp_compute_m_n(int bpp,
749		     int nlanes,
750		     int pixel_clock,
751		     int link_clock,
752		     struct intel_dp_m_n *m_n)
753{
754	m_n->tu = 64;
755	m_n->gmch_m = (pixel_clock * bpp) >> 3;
756	m_n->gmch_n = link_clock * nlanes;
757	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
758	m_n->link_m = pixel_clock;
759	m_n->link_n = link_clock;
760	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
761}
762
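A worked example of the M/N computation above for an assumed 148.5 MHz mode carried on two 2.7 GHz lanes at 24 bpp; neither ratio exceeds 24 bits, so intel_reduce_ratio() leaves them untouched.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

static void reduce(uint32_t *num, uint32_t *den)
{
        while (*num > 0xffffff || *den > 0xffffff) {
                *num >>= 1;
                *den >>= 1;
        }
}

int main(void)
{
        uint32_t gmch_m = (148500 * 24) >> 3;   /* pixel data rate, bpp/8 bytes per pixel */
        uint32_t gmch_n = 270000 * 2;           /* link clock times lane count */
        uint32_t link_m = 148500, link_n = 270000;

        reduce(&gmch_m, &gmch_n);
        reduce(&link_m, &link_n);
        printf("data M/N = %u/%u, link M/N = %u/%u\n",
            gmch_m, gmch_n, link_m, link_n);    /* 445500/540000, 148500/270000 */
        return 0;
}
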
763void
764intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
765		 struct drm_display_mode *adjusted_mode)
766{
767	struct drm_device *dev = crtc->dev;
768	struct drm_mode_config *mode_config = &dev->mode_config;
769	struct drm_encoder *encoder;
770	struct drm_i915_private *dev_priv = dev->dev_private;
771	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
772	int lane_count = 4;
773	struct intel_dp_m_n m_n;
774	int pipe = intel_crtc->pipe;
775
776	/*
777	 * Find the lane count in the intel_encoder private
778	 */
779	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
780		struct intel_dp *intel_dp;
781
782		if (encoder->crtc != crtc)
783			continue;
784
785		intel_dp = enc_to_intel_dp(encoder);
786		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
787		    intel_dp->base.type == INTEL_OUTPUT_EDP)
788		{
789			lane_count = intel_dp->lane_count;
790			break;
791		}
792	}
793
794	/*
795	 * Compute the GMCH and Link ratios. The '3' here is
796	 * the number of bytes_per_pixel post-LUT, which we always
797	 * set up for 8-bits of R/G/B, or 3 bytes total.
798	 */
799	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
800			     mode->clock, adjusted_mode->clock, &m_n);
801
802	if (HAS_PCH_SPLIT(dev)) {
803		I915_WRITE(TRANSDATA_M1(pipe),
804			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
805			   m_n.gmch_m);
806		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
807		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
808		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
809	} else {
810		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
811			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
812			   m_n.gmch_m);
813		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
814		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
815		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
816	}
817}
818
819static void ironlake_edp_pll_on(struct drm_encoder *encoder);
820static void ironlake_edp_pll_off(struct drm_encoder *encoder);
821
822static void
823intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
824		  struct drm_display_mode *adjusted_mode)
825{
826	struct drm_device *dev = encoder->dev;
827	struct drm_i915_private *dev_priv = dev->dev_private;
828	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
829	struct drm_crtc *crtc = intel_dp->base.base.crtc;
830	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
831
832	/* Turn on the eDP PLL if needed */
833	if (is_edp(intel_dp)) {
834		if (!is_pch_edp(intel_dp))
835			ironlake_edp_pll_on(encoder);
836		else
837			ironlake_edp_pll_off(encoder);
838	}
839
840	/*
841	 * There are four kinds of DP registers:
842	 *
843	 * 	IBX PCH
844	 * 	SNB CPU
845	 *	IVB CPU
846	 * 	CPT PCH
847	 *
848	 * IBX PCH and CPU are the same for almost everything,
849	 * except that the CPU DP PLL is configured in this
850	 * register
851	 *
852	 * CPT PCH is quite different, having many bits moved
853	 * to the TRANS_DP_CTL register instead. That
854	 * configuration happens (oddly) in ironlake_pch_enable
855	 */
856
857	/* Preserve the BIOS-computed detected bit. This is
858	 * supposed to be read-only.
859	 */
860	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
861	intel_dp->DP |=  DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
862
863	/* Handle DP bits in common between all three register formats */
864
865	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
866
867	switch (intel_dp->lane_count) {
868	case 1:
869		intel_dp->DP |= DP_PORT_WIDTH_1;
870		break;
871	case 2:
872		intel_dp->DP |= DP_PORT_WIDTH_2;
873		break;
874	case 4:
875		intel_dp->DP |= DP_PORT_WIDTH_4;
876		break;
877	}
878	if (intel_dp->has_audio) {
879		DRM_DEBUG_KMS("Enabling DP audio on pipe %c\n",
880				 pipe_name(intel_crtc->pipe));
881		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
882		intel_write_eld(encoder, adjusted_mode);
883	}
884	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
885	intel_dp->link_configuration[0] = intel_dp->link_bw;
886	intel_dp->link_configuration[1] = intel_dp->lane_count;
887	/*
888	 * Check for DPCD version > 1.1 and enhanced framing support
889	 */
890	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
891	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
892		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
893	}
894
895	/* Split out the IBX/CPU vs CPT settings */
896
897	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
898		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
899			intel_dp->DP |= DP_SYNC_HS_HIGH;
900		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
901			intel_dp->DP |= DP_SYNC_VS_HIGH;
902		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
903
904		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
905			intel_dp->DP |= DP_ENHANCED_FRAMING;
906
907		intel_dp->DP |= intel_crtc->pipe << 29;
908
909		/* don't miss out required setting for eDP */
910		intel_dp->DP |= DP_PLL_ENABLE;
911		if (adjusted_mode->clock < 200000)
912			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
913		else
914			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
915	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
916		intel_dp->DP |= intel_dp->color_range;
917
918		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
919			intel_dp->DP |= DP_SYNC_HS_HIGH;
920		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
921			intel_dp->DP |= DP_SYNC_VS_HIGH;
922		intel_dp->DP |= DP_LINK_TRAIN_OFF;
923
924		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
925			intel_dp->DP |= DP_ENHANCED_FRAMING;
926
927		if (intel_crtc->pipe == 1)
928			intel_dp->DP |= DP_PIPEB_SELECT;
929
930		if (is_cpu_edp(intel_dp)) {
931			/* don't miss out required setting for eDP */
932			intel_dp->DP |= DP_PLL_ENABLE;
933			if (adjusted_mode->clock < 200000)
934				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
935			else
936				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
937		}
938	} else {
939		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
940	}
941}
942
943#define IDLE_ON_MASK		(PP_ON | 0 	  | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
944#define IDLE_ON_VALUE   	(PP_ON | 0 	  | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_ON_IDLE)
945
946#define IDLE_OFF_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
947#define IDLE_OFF_VALUE		(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)
948
949#define IDLE_CYCLE_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
950#define IDLE_CYCLE_VALUE	(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)
951
952static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
953				       u32 mask,
954				       u32 value)
955{
956	struct drm_device *dev = intel_dp->base.base.dev;
957	struct drm_i915_private *dev_priv = dev->dev_private;
958
959	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
960		      mask, value,
961		      I915_READ(PCH_PP_STATUS),
962		      I915_READ(PCH_PP_CONTROL));
963
964	if (_intel_wait_for(dev,
965	    (I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10, "915iwp")) {
966		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
967			  I915_READ(PCH_PP_STATUS),
968			  I915_READ(PCH_PP_CONTROL));
969	}
970}
971
972static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
973{
974	DRM_DEBUG_KMS("Wait for panel power on\n");
975	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
976}
977
978static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
979{
980	DRM_DEBUG_KMS("Wait for panel power off time\n");
981	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
982}
983
984static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
985{
986	DRM_DEBUG_KMS("Wait for panel power cycle\n");
987	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
988}
989
990
991/* Read the current pp_control value, unlocking the register if it
992 * is locked
993 */
994
995static  u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
996{
997	u32	control = I915_READ(PCH_PP_CONTROL);
998
999	control &= ~PANEL_UNLOCK_MASK;
1000	control |= PANEL_UNLOCK_REGS;
1001	return control;
1002}
1003
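A sketch of what "unlocking" means here: the upper 16 bits of PCH_PP_CONTROL are a write-protect key, and the helper above substitutes the unlock value for whatever was read back. The 0xabcd key is an assumption based on the usual i915 register definitions, not something stated in this file.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
        uint32_t control     = 0x00000003;      /* example value read from the register */
        uint32_t unlock_mask = 0xffff0000;      /* assumed PANEL_UNLOCK_MASK */
        uint32_t unlock_key  = 0xabcd0000;      /* assumed PANEL_UNLOCK_REGS */

        control = (control & ~unlock_mask) | unlock_key;
        printf("0x%08x\n", control);            /* 0xabcd0003 */
        return 0;
}
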
1004static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
1005{
1006	struct drm_device *dev = intel_dp->base.base.dev;
1007	struct drm_i915_private *dev_priv = dev->dev_private;
1008	u32 pp;
1009
1010	if (!is_edp(intel_dp))
1011		return;
1012	DRM_DEBUG_KMS("Turn eDP VDD on\n");
1013
1014	if (intel_dp->want_panel_vdd)
1015		printf("eDP VDD already requested on\n");
1016
1017	intel_dp->want_panel_vdd = true;
1018
1019	if (ironlake_edp_have_panel_vdd(intel_dp)) {
1020		DRM_DEBUG_KMS("eDP VDD already on\n");
1021		return;
1022	}
1023
1024	if (!ironlake_edp_have_panel_power(intel_dp))
1025		ironlake_wait_panel_power_cycle(intel_dp);
1026
1027	pp = ironlake_get_pp_control(dev_priv);
1028	pp |= EDP_FORCE_VDD;
1029	I915_WRITE(PCH_PP_CONTROL, pp);
1030	POSTING_READ(PCH_PP_CONTROL);
1031	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
1032		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
1033
1034	/*
1035	 * If the panel wasn't on, delay before accessing aux channel
1036	 */
1037	if (!ironlake_edp_have_panel_power(intel_dp)) {
1038		DRM_DEBUG_KMS("eDP was not running\n");
1039		drm_msleep(intel_dp->panel_power_up_delay, "915edpon");
1040	}
1041}
1042
1043static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
1044{
1045	struct drm_device *dev = intel_dp->base.base.dev;
1046	struct drm_i915_private *dev_priv = dev->dev_private;
1047	u32 pp;
1048
1049	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
1050		pp = ironlake_get_pp_control(dev_priv);
1051		pp &= ~EDP_FORCE_VDD;
1052		I915_WRITE(PCH_PP_CONTROL, pp);
1053		POSTING_READ(PCH_PP_CONTROL);
1054
1055		/* Make sure sequencer is idle before allowing subsequent activity */
1056		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
1057			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
1058
1059		drm_msleep(intel_dp->panel_power_down_delay, "915vddo");
1060	}
1061}
1062
1063static void ironlake_panel_vdd_work(void *arg, int pending __unused)
1064{
1065	struct intel_dp *intel_dp = arg;
1066	struct drm_device *dev = intel_dp->base.base.dev;
1067
1068	sx_xlock(&dev->mode_config.mutex);
1069	ironlake_panel_vdd_off_sync(intel_dp);
1070	sx_xunlock(&dev->mode_config.mutex);
1071}
1072
1073static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
1074{
1075	if (!is_edp(intel_dp))
1076		return;
1077
1078	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
1079	if (!intel_dp->want_panel_vdd)
1080		printf("eDP VDD not forced on\n");
1081
1082	intel_dp->want_panel_vdd = false;
1083
1084	if (sync) {
1085		ironlake_panel_vdd_off_sync(intel_dp);
1086	} else {
1087		/*
1088		 * Queue the timer to fire a long
1089		 * time from now (relative to the power down delay)
1090		 * to keep the panel power up across a sequence of operations
1091		 */
1092		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;
1093		taskqueue_enqueue_timeout(dev_priv->tq,
1094		    &intel_dp->panel_vdd_task,
1095		    msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
1096	}
1097}
1098
1099static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
1100{
1101	struct drm_device *dev = intel_dp->base.base.dev;
1102	struct drm_i915_private *dev_priv = dev->dev_private;
1103	u32 pp;
1104
1105	if (!is_edp(intel_dp))
1106		return;
1107
1108	DRM_DEBUG_KMS("Turn eDP power on\n");
1109
1110	if (ironlake_edp_have_panel_power(intel_dp)) {
1111		DRM_DEBUG_KMS("eDP power already on\n");
1112		return;
1113	}
1114
1115	ironlake_wait_panel_power_cycle(intel_dp);
1116
1117	pp = ironlake_get_pp_control(dev_priv);
1118	if (IS_GEN5(dev)) {
1119		/* ILK workaround: disable reset around power sequence */
1120		pp &= ~PANEL_POWER_RESET;
1121		I915_WRITE(PCH_PP_CONTROL, pp);
1122		POSTING_READ(PCH_PP_CONTROL);
1123	}
1124
1125	pp |= POWER_TARGET_ON;
1126	if (!IS_GEN5(dev))
1127		pp |= PANEL_POWER_RESET;
1128
1129	I915_WRITE(PCH_PP_CONTROL, pp);
1130	POSTING_READ(PCH_PP_CONTROL);
1131
1132	ironlake_wait_panel_on(intel_dp);
1133
1134	if (IS_GEN5(dev)) {
1135		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
1136		I915_WRITE(PCH_PP_CONTROL, pp);
1137		POSTING_READ(PCH_PP_CONTROL);
1138	}
1139}
1140
1141static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
1142{
1143	struct drm_device *dev = intel_dp->base.base.dev;
1144	struct drm_i915_private *dev_priv = dev->dev_private;
1145	u32 pp;
1146
1147	if (!is_edp(intel_dp))
1148		return;
1149
1150	DRM_DEBUG_KMS("Turn eDP power off\n");
1151
1152	if (intel_dp->want_panel_vdd)
1153		printf("Cannot turn power off while VDD is on\n");
1154	ironlake_panel_vdd_off_sync(intel_dp); /* finish any pending work */
1155
1156	pp = ironlake_get_pp_control(dev_priv);
1157	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
1158	I915_WRITE(PCH_PP_CONTROL, pp);
1159	POSTING_READ(PCH_PP_CONTROL);
1160
1161	ironlake_wait_panel_off(intel_dp);
1162}
1163
1164static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
1165{
1166	struct drm_device *dev = intel_dp->base.base.dev;
1167	struct drm_i915_private *dev_priv = dev->dev_private;
1168	u32 pp;
1169
1170	if (!is_edp(intel_dp))
1171		return;
1172
1173	DRM_DEBUG_KMS("\n");
1174	/*
1175	 * If we enable the backlight right away following a panel power
1176	 * on, we may see slight flicker as the panel syncs with the eDP
1177	 * link.  So delay a bit to make sure the image is solid before
1178	 * allowing it to appear.
1179	 */
1180	drm_msleep(intel_dp->backlight_on_delay, "915ebo");
1181	pp = ironlake_get_pp_control(dev_priv);
1182	pp |= EDP_BLC_ENABLE;
1183	I915_WRITE(PCH_PP_CONTROL, pp);
1184	POSTING_READ(PCH_PP_CONTROL);
1185}
1186
1187static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
1188{
1189	struct drm_device *dev = intel_dp->base.base.dev;
1190	struct drm_i915_private *dev_priv = dev->dev_private;
1191	u32 pp;
1192
1193	if (!is_edp(intel_dp))
1194		return;
1195
1196	DRM_DEBUG_KMS("\n");
1197	pp = ironlake_get_pp_control(dev_priv);
1198	pp &= ~EDP_BLC_ENABLE;
1199	I915_WRITE(PCH_PP_CONTROL, pp);
1200	POSTING_READ(PCH_PP_CONTROL);
1201	drm_msleep(intel_dp->backlight_off_delay, "915bo1");
1202}
1203
1204static void ironlake_edp_pll_on(struct drm_encoder *encoder)
1205{
1206	struct drm_device *dev = encoder->dev;
1207	struct drm_i915_private *dev_priv = dev->dev_private;
1208	u32 dpa_ctl;
1209
1210	DRM_DEBUG_KMS("\n");
1211	dpa_ctl = I915_READ(DP_A);
1212	dpa_ctl |= DP_PLL_ENABLE;
1213	I915_WRITE(DP_A, dpa_ctl);
1214	POSTING_READ(DP_A);
1215	DELAY(200);
1216}
1217
1218static void ironlake_edp_pll_off(struct drm_encoder *encoder)
1219{
1220	struct drm_device *dev = encoder->dev;
1221	struct drm_i915_private *dev_priv = dev->dev_private;
1222	u32 dpa_ctl;
1223
1224	dpa_ctl = I915_READ(DP_A);
1225	dpa_ctl &= ~DP_PLL_ENABLE;
1226	I915_WRITE(DP_A, dpa_ctl);
1227	POSTING_READ(DP_A);
1228	DELAY(200);
1229}
1230
1231/* If the sink supports it, try to set the power state appropriately */
1232static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
1233{
1234	int ret, i;
1235
1236	/* Should have a valid DPCD by this point */
1237	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1238		return;
1239
1240	if (mode != DRM_MODE_DPMS_ON) {
1241		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
1242						  DP_SET_POWER_D3);
1243		if (ret != 1)
1244			DRM_DEBUG("failed to write sink power state\n");
1245	} else {
1246		/*
1247		 * When turning on, we need to retry for 1ms to give the sink
1248		 * time to wake up.
1249		 */
1250		for (i = 0; i < 3; i++) {
1251			ret = intel_dp_aux_native_write_1(intel_dp,
1252							  DP_SET_POWER,
1253							  DP_SET_POWER_D0);
1254			if (ret == 1)
1255				break;
1256			drm_msleep(1, "915dps");
1257		}
1258	}
1259}
1260
1261static void intel_dp_prepare(struct drm_encoder *encoder)
1262{
1263	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1264
1265	ironlake_edp_backlight_off(intel_dp);
1266	ironlake_edp_panel_off(intel_dp);
1267
1268	/* Wake up the sink first */
1269	ironlake_edp_panel_vdd_on(intel_dp);
1270	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
1271	intel_dp_link_down(intel_dp);
1272	ironlake_edp_panel_vdd_off(intel_dp, false);
1273
1274	/* Make sure the panel is off before trying to
1275	 * change the mode
1276	 */
1277}
1278
1279static void intel_dp_commit(struct drm_encoder *encoder)
1280{
1281	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1282	struct drm_device *dev = encoder->dev;
1283	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
1284
1285	ironlake_edp_panel_vdd_on(intel_dp);
1286	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
1287	intel_dp_start_link_train(intel_dp);
1288	ironlake_edp_panel_on(intel_dp);
1289	ironlake_edp_panel_vdd_off(intel_dp, true);
1290	intel_dp_complete_link_train(intel_dp);
1291	ironlake_edp_backlight_on(intel_dp);
1292
1293	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;
1294
1295	if (HAS_PCH_CPT(dev))
1296		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
1297}
1298
1299static void
1300intel_dp_dpms(struct drm_encoder *encoder, int mode)
1301{
1302	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1303	struct drm_device *dev = encoder->dev;
1304	struct drm_i915_private *dev_priv = dev->dev_private;
1305	uint32_t dp_reg = I915_READ(intel_dp->output_reg);
1306
1307	if (mode != DRM_MODE_DPMS_ON) {
1308		ironlake_edp_backlight_off(intel_dp);
1309		ironlake_edp_panel_off(intel_dp);
1310
1311		ironlake_edp_panel_vdd_on(intel_dp);
1312		intel_dp_sink_dpms(intel_dp, mode);
1313		intel_dp_link_down(intel_dp);
1314		ironlake_edp_panel_vdd_off(intel_dp, false);
1315
1316		if (is_cpu_edp(intel_dp))
1317			ironlake_edp_pll_off(encoder);
1318	} else {
1319		if (is_cpu_edp(intel_dp))
1320			ironlake_edp_pll_on(encoder);
1321
1322		ironlake_edp_panel_vdd_on(intel_dp);
1323		intel_dp_sink_dpms(intel_dp, mode);
1324		if (!(dp_reg & DP_PORT_EN)) {
1325			intel_dp_start_link_train(intel_dp);
1326			ironlake_edp_panel_on(intel_dp);
1327			ironlake_edp_panel_vdd_off(intel_dp, true);
1328			intel_dp_complete_link_train(intel_dp);
1329		} else
1330			ironlake_edp_panel_vdd_off(intel_dp, false);
1331		ironlake_edp_backlight_on(intel_dp);
1332	}
1333	intel_dp->dpms_mode = mode;
1334}
1335/*
1336 * Native read with retry for link status and receiver capability reads for
1337 * cases where the sink may still be asleep.
1338 */
1339static bool
1340intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
1341			       uint8_t *recv, int recv_bytes)
1342{
1343	int ret, i;
1344
1345	/*
1346	 * Sinks are *supposed* to come up within 1ms from an off state,
1347	 * but we're also supposed to retry 3 times per the spec.
1348	 */
1349	for (i = 0; i < 3; i++) {
1350		ret = intel_dp_aux_native_read(intel_dp, address, recv,
1351					       recv_bytes);
1352		if (ret == recv_bytes)
1353			return true;
1354		drm_msleep(1, "915dpl");
1355	}
1356
1357	return false;
1358}
1359
1360/*
1361 * Fetch AUX CH registers 0x202 - 0x207 which contain
1362 * link status information
1363 */
1364static bool
1365intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1366{
1367	return intel_dp_aux_native_read_retry(intel_dp,
1368					      DP_LANE0_1_STATUS,
1369					      link_status,
1370					      DP_LINK_STATUS_SIZE);
1371}
1372
1373static uint8_t
1374intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1375		     int r)
1376{
1377	return link_status[r - DP_LANE0_1_STATUS];
1378}
1379
1380static uint8_t
1381intel_get_adjust_request_voltage(uint8_t adjust_request[2],
1382				 int lane)
1383{
1384	int	    s = ((lane & 1) ?
1385			 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1386			 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1387	uint8_t l = adjust_request[lane>>1];
1388
1389	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1390}
1391
1392static uint8_t
1393intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
1394				      int lane)
1395{
1396	int	    s = ((lane & 1) ?
1397			 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1398			 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1399	uint8_t l = adjust_request[lane>>1];
1400
1401	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1402}
1403
1404
1405#if 0
1406static char	*voltage_names[] = {
1407	"0.4V", "0.6V", "0.8V", "1.2V"
1408};
1409static char	*pre_emph_names[] = {
1410	"0dB", "3.5dB", "6dB", "9.5dB"
1411};
1412static char	*link_train_names[] = {
1413	"pattern 1", "pattern 2", "idle", "off"
1414};
1415#endif
1416
1417/*
1418 * These are source-specific values; current Intel hardware supports
1419 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
1420 */
1421
1422static uint8_t
1423intel_dp_voltage_max(struct intel_dp *intel_dp)
1424{
1425	struct drm_device *dev = intel_dp->base.base.dev;
1426
1427	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
1428		return DP_TRAIN_VOLTAGE_SWING_800;
1429	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
1430		return DP_TRAIN_VOLTAGE_SWING_1200;
1431	else
1432		return DP_TRAIN_VOLTAGE_SWING_800;
1433}
1434
1435static uint8_t
1436intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
1437{
1438	struct drm_device *dev = intel_dp->base.base.dev;
1439
1440	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
1441		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1442		case DP_TRAIN_VOLTAGE_SWING_400:
1443			return DP_TRAIN_PRE_EMPHASIS_6;
1444		case DP_TRAIN_VOLTAGE_SWING_600:
1445		case DP_TRAIN_VOLTAGE_SWING_800:
1446			return DP_TRAIN_PRE_EMPHASIS_3_5;
1447		default:
1448			return DP_TRAIN_PRE_EMPHASIS_0;
1449		}
1450	} else {
1451		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1452		case DP_TRAIN_VOLTAGE_SWING_400:
1453			return DP_TRAIN_PRE_EMPHASIS_6;
1454		case DP_TRAIN_VOLTAGE_SWING_600:
1455			return DP_TRAIN_PRE_EMPHASIS_6;
1456		case DP_TRAIN_VOLTAGE_SWING_800:
1457			return DP_TRAIN_PRE_EMPHASIS_3_5;
1458		case DP_TRAIN_VOLTAGE_SWING_1200:
1459		default:
1460			return DP_TRAIN_PRE_EMPHASIS_0;
1461		}
1462	}
1463}
1464
1465static void
1466intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1467{
1468	uint8_t v = 0;
1469	uint8_t p = 0;
1470	int lane;
1471	uint8_t	*adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
1472	uint8_t voltage_max;
1473	uint8_t preemph_max;
1474
1475	for (lane = 0; lane < intel_dp->lane_count; lane++) {
1476		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
1477		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);
1478
1479		if (this_v > v)
1480			v = this_v;
1481		if (this_p > p)
1482			p = this_p;
1483	}
1484
1485	voltage_max = intel_dp_voltage_max(intel_dp);
1486	if (v >= voltage_max)
1487		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
1488
1489	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
1490	if (p >= preemph_max)
1491		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1492
1493	for (lane = 0; lane < 4; lane++)
1494		intel_dp->train_set[lane] = v | p;
1495}
1496
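A standalone sketch of the policy implemented above: take the largest voltage swing and pre-emphasis any lane asked for, clamp to the source limits, and set the "max reached" flags so the sink stops requesting more. The bit positions follow the DPCD TRAINING_LANEx_SET layout and are an assumption, not taken from this file.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

#define MAX_SWING_REACHED               (1 << 2)
#define MAX_PRE_EMPHASIS_REACHED        (1 << 5)

int main(void)
{
        uint8_t req_v[2] = { 1, 3 };            /* per-lane voltage-swing requests */
        uint8_t req_p[2] = { 1 << 3, 0 };       /* per-lane pre-emphasis requests */
        uint8_t v = 0, p = 0;
        uint8_t v_max = 2, p_max = 1 << 3;      /* example source limits */
        int lane;

        for (lane = 0; lane < 2; lane++) {
                if (req_v[lane] > v)
                        v = req_v[lane];
                if (req_p[lane] > p)
                        p = req_p[lane];
        }
        if (v >= v_max)
                v = v_max | MAX_SWING_REACHED;
        if (p >= p_max)
                p = p_max | MAX_PRE_EMPHASIS_REACHED;
        printf("train_set byte = 0x%02x\n", v | p);     /* 0x2e */
        return 0;
}
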
1497static uint32_t
1498intel_dp_signal_levels(uint8_t train_set)
1499{
1500	uint32_t	signal_levels = 0;
1501
1502	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
1503	case DP_TRAIN_VOLTAGE_SWING_400:
1504	default:
1505		signal_levels |= DP_VOLTAGE_0_4;
1506		break;
1507	case DP_TRAIN_VOLTAGE_SWING_600:
1508		signal_levels |= DP_VOLTAGE_0_6;
1509		break;
1510	case DP_TRAIN_VOLTAGE_SWING_800:
1511		signal_levels |= DP_VOLTAGE_0_8;
1512		break;
1513	case DP_TRAIN_VOLTAGE_SWING_1200:
1514		signal_levels |= DP_VOLTAGE_1_2;
1515		break;
1516	}
1517	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1518	case DP_TRAIN_PRE_EMPHASIS_0:
1519	default:
1520		signal_levels |= DP_PRE_EMPHASIS_0;
1521		break;
1522	case DP_TRAIN_PRE_EMPHASIS_3_5:
1523		signal_levels |= DP_PRE_EMPHASIS_3_5;
1524		break;
1525	case DP_TRAIN_PRE_EMPHASIS_6:
1526		signal_levels |= DP_PRE_EMPHASIS_6;
1527		break;
1528	case DP_TRAIN_PRE_EMPHASIS_9_5:
1529		signal_levels |= DP_PRE_EMPHASIS_9_5;
1530		break;
1531	}
1532	return signal_levels;
1533}
1534
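A small sketch decoding one train_set byte back into the levels the switch above maps to register values; the DPCD field layout (swing in bits 1:0, pre-emphasis in bits 4:3) is assumed rather than included from a header.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
        static const char *swing[]   = { "0.4V", "0.6V", "0.8V", "1.2V" };
        static const char *preemph[] = { "0dB", "3.5dB", "6dB", "9.5dB" };
        uint8_t train_set = 0x0a;       /* example byte from intel_get_adjust_train() */

        printf("swing %s, pre-emphasis %s\n",
            swing[train_set & 0x3], preemph[(train_set >> 3) & 0x3]);
        return 0;       /* swing 0.8V, pre-emphasis 3.5dB */
}
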
1535/* Gen6's DP voltage swing and pre-emphasis control */
1536static uint32_t
1537intel_gen6_edp_signal_levels(uint8_t train_set)
1538{
1539	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1540					 DP_TRAIN_PRE_EMPHASIS_MASK);
1541	switch (signal_levels) {
1542	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
1543	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
1544		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1545	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
1546		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
1547	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
1548	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
1549		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
1550	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
1551	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
1552		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
1553	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
1554	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
1555		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
1556	default:
1557		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
1558			      "0x%x\n", signal_levels);
1559		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
1560	}
1561}
1562
1563/* Gen7's DP voltage swing and pre-emphasis control */
1564static uint32_t
1565intel_gen7_edp_signal_levels(uint8_t train_set)
1566{
1567	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
1568					 DP_TRAIN_PRE_EMPHASIS_MASK);
1569	switch (signal_levels) {
1570	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
1571		return EDP_LINK_TRAIN_400MV_0DB_IVB;
1572	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
1573		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
1574	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
1575		return EDP_LINK_TRAIN_400MV_6DB_IVB;
1576
1577	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
1578		return EDP_LINK_TRAIN_600MV_0DB_IVB;
1579	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
1580		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;
1581
1582	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
1583		return EDP_LINK_TRAIN_800MV_0DB_IVB;
1584	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
1585		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;
1586
1587	default:
1588		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
1589			      "0x%x\n", signal_levels);
1590		return EDP_LINK_TRAIN_500MV_0DB_IVB;
1591	}
1592}
1593
1594static uint8_t
1595intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1596		      int lane)
1597{
1598	int s = (lane & 1) * 4;
1599	uint8_t l = link_status[lane>>1];
1600
1601	return (l >> s) & 0xf;
1602}
1603
1604/* Check for clock recovery is done on all channels */
1605static bool
1606intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1607{
1608	int lane;
1609	uint8_t lane_status;
1610
1611	for (lane = 0; lane < lane_count; lane++) {
1612		lane_status = intel_get_lane_status(link_status, lane);
1613		if ((lane_status & DP_LANE_CR_DONE) == 0)
1614			return false;
1615	}
1616	return true;
1617}
1618
1619/* Check to see if channel eq is done on all channels */
1620#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1621			 DP_LANE_CHANNEL_EQ_DONE|\
1622			 DP_LANE_SYMBOL_LOCKED)
1623static bool
1624intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1625{
1626	uint8_t lane_align;
1627	uint8_t lane_status;
1628	int lane;
1629
1630	lane_align = intel_dp_link_status(link_status,
1631					  DP_LANE_ALIGN_STATUS_UPDATED);
1632	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1633		return false;
1634	for (lane = 0; lane < intel_dp->lane_count; lane++) {
1635		lane_status = intel_get_lane_status(link_status, lane);
1636		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1637			return false;
1638	}
1639	return true;
1640}
1641
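A standalone sketch of the lane-status layout the two checks above walk: each byte of DP_LANE0_1_STATUS holds two lanes, one nibble each, and a fully trained lane reads 0x7 (CR done, channel EQ done, symbol locked). The bit values are the standard DPCD ones, assumed here rather than included.

/* Hypothetical sketch, not driver code. */
#include <stdio.h>
#include <stdint.h>

#define LANE_CR_DONE            (1 << 0)
#define LANE_CHANNEL_EQ_DONE    (1 << 1)
#define LANE_SYMBOL_LOCKED      (1 << 2)
#define CHANNEL_EQ_BITS (LANE_CR_DONE | LANE_CHANNEL_EQ_DONE | LANE_SYMBOL_LOCKED)

int main(void)
{
        uint8_t lane0_1_status = 0x17;  /* example: lane 0 fully trained, lane 1 CR only */
        int lane;

        for (lane = 0; lane < 2; lane++) {
                uint8_t s = (lane0_1_status >> ((lane & 1) * 4)) & 0xf;

                printf("lane %d: cr %d, eq %s\n", lane,
                    s & LANE_CR_DONE ? 1 : 0,
                    (s & CHANNEL_EQ_BITS) == CHANNEL_EQ_BITS ? "done" : "not yet");
        }
        return 0;
}
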
1642static bool
1643intel_dp_set_link_train(struct intel_dp *intel_dp,
1644			uint32_t dp_reg_value,
1645			uint8_t dp_train_pat)
1646{
1647	struct drm_device *dev = intel_dp->base.base.dev;
1648	struct drm_i915_private *dev_priv = dev->dev_private;
1649	int ret;
1650
1651	I915_WRITE(intel_dp->output_reg, dp_reg_value);
1652	POSTING_READ(intel_dp->output_reg);
1653
1654	intel_dp_aux_native_write_1(intel_dp,
1655				    DP_TRAINING_PATTERN_SET,
1656				    dp_train_pat);
1657
1658	ret = intel_dp_aux_native_write(intel_dp,
1659					DP_TRAINING_LANE0_SET,
1660					intel_dp->train_set,
1661					intel_dp->lane_count);
1662	if (ret != intel_dp->lane_count)
1663		return false;
1664
1665	return true;
1666}
1667
1668/* Enable corresponding port and start training pattern 1 */
1669static void
1670intel_dp_start_link_train(struct intel_dp *intel_dp)
1671{
1672	struct drm_device *dev = intel_dp->base.base.dev;
1673	struct drm_i915_private *dev_priv = dev->dev_private;
1674	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
1675	int i;
1676	uint8_t voltage;
1677	bool clock_recovery = false;
1678	int voltage_tries, loop_tries;
1679	u32 reg;
1680	uint32_t DP = intel_dp->DP;
1681
1682	/* Enable output, wait for it to become active */
1683	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
1684	POSTING_READ(intel_dp->output_reg);
1685	intel_wait_for_vblank(dev, intel_crtc->pipe);
1686
1687	/* Write the link configuration data */
1688	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
1689				  intel_dp->link_configuration,
1690				  DP_LINK_CONFIGURATION_SIZE);
1691
1692	DP |= DP_PORT_EN;
1693
1694	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
1695		DP &= ~DP_LINK_TRAIN_MASK_CPT;
1696	else
1697		DP &= ~DP_LINK_TRAIN_MASK;
1698	memset(intel_dp->train_set, 0, 4);
1699	voltage = 0xff;
1700	voltage_tries = 0;
1701	loop_tries = 0;
1702	clock_recovery = false;
1703	for (;;) {
1704		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1705		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
1706		uint32_t    signal_levels;
1707
1708
1709		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
1710			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
1711			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
1712		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1713			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1714			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1715		} else {
1716			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
1717			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
1718			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1719		}
1720
1721		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
1722			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
1723		else
1724			reg = DP | DP_LINK_TRAIN_PAT_1;
1725
1726		if (!intel_dp_set_link_train(intel_dp, reg,
1727					     DP_TRAINING_PATTERN_1))
1728			break;
1729		/* Set training pattern 1 */
1730
1731		DELAY(100);
1732		if (!intel_dp_get_link_status(intel_dp, link_status)) {
1733			DRM_ERROR("failed to get link status\n");
1734			break;
1735		}
1736
1737		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1738			DRM_DEBUG_KMS("clock recovery OK\n");
1739			clock_recovery = true;
1740			break;
1741		}
1742
1743		/* Check to see if we've tried the max voltage */
1744		for (i = 0; i < intel_dp->lane_count; i++)
1745			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
1746				break;
1747		if (i == intel_dp->lane_count) {
1748			++loop_tries;
1749			if (loop_tries == 5) {
1750				DRM_DEBUG_KMS("too many full retries, give up\n");
1751				break;
1752			}
1753			memset(intel_dp->train_set, 0, 4);
1754			voltage_tries = 0;
1755			continue;
1756		}
1757
1758		/* Check to see if we've tried the same voltage 5 times */
1759		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
1760			++voltage_tries;
1761			if (voltage_tries == 5) {
1762				DRM_DEBUG_KMS("too many voltage retries, give up\n");
1763				break;
1764			}
1765		} else
1766			voltage_tries = 0;
1767		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
1768
1769		/* Compute new intel_dp->train_set as requested by target */
1770		intel_get_adjust_train(intel_dp, link_status);
1771	}
1772
1773	intel_dp->DP = DP;
1774}
1775
1776static void
1777intel_dp_complete_link_train(struct intel_dp *intel_dp)
1778{
1779	struct drm_device *dev = intel_dp->base.base.dev;
1780	struct drm_i915_private *dev_priv = dev->dev_private;
1781	bool channel_eq = false;
1782	int tries, cr_tries;
1783	u32 reg;
1784	uint32_t DP = intel_dp->DP;
1785
1786	/* channel equalization */
1787	tries = 0;
1788	cr_tries = 0;
1789	channel_eq = false;
1790	for (;;) {
1791		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
1792		uint32_t    signal_levels;
1793		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
1794
1795		if (cr_tries > 5) {
1796			DRM_ERROR("failed to train DP, aborting\n");
1797			intel_dp_link_down(intel_dp);
1798			break;
1799		}
1800
1801		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
1802			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
1803			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
1804		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
1805			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
1806			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
1807		} else {
1808			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
1809			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
1810		}
1811
1812		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
1813			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
1814		else
1815			reg = DP | DP_LINK_TRAIN_PAT_2;
1816
1817		/* channel eq pattern */
1818		if (!intel_dp_set_link_train(intel_dp, reg,
1819					     DP_TRAINING_PATTERN_2))
1820			break;
1821
1822		DELAY(400);
1823		if (!intel_dp_get_link_status(intel_dp, link_status))
1824			break;
1825
1826		/* Make sure clock is still ok */
1827		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
1828			intel_dp_start_link_train(intel_dp);
1829			cr_tries++;
1830			continue;
1831		}
1832
1833		if (intel_channel_eq_ok(intel_dp, link_status)) {
1834			channel_eq = true;
1835			break;
1836		}
1837
1838		/* Try 5 times, then try clock recovery if that fails */
1839		if (tries > 5) {
1840			intel_dp_link_down(intel_dp);
1841			intel_dp_start_link_train(intel_dp);
1842			tries = 0;
1843			cr_tries++;
1844			continue;
1845		}
1846
1847		/* Compute new intel_dp->train_set as requested by target */
1848		intel_get_adjust_train(intel_dp, link_status);
1849		++tries;
1850	}
1851
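	/* Training is done (or was abandoned): take the port out of the
	 * training pattern and tell the sink to stop expecting one.
	 */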
1852	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
1853		reg = DP | DP_LINK_TRAIN_OFF_CPT;
1854	else
1855		reg = DP | DP_LINK_TRAIN_OFF;
1856
1857	I915_WRITE(intel_dp->output_reg, reg);
1858	POSTING_READ(intel_dp->output_reg);
1859	intel_dp_aux_native_write_1(intel_dp,
1860				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
1861}
1862
1863static void
1864intel_dp_link_down(struct intel_dp *intel_dp)
1865{
1866	struct drm_device *dev = intel_dp->base.base.dev;
1867	struct drm_i915_private *dev_priv = dev->dev_private;
1868	uint32_t DP = intel_dp->DP;
1869
1870	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
1871		return;
1872
1873	DRM_DEBUG_KMS("\n");
1874
1875	if (is_edp(intel_dp)) {
1876		DP &= ~DP_PLL_ENABLE;
1877		I915_WRITE(intel_dp->output_reg, DP);
1878		POSTING_READ(intel_dp->output_reg);
1879		DELAY(100);
1880	}
1881
1882	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
1883		DP &= ~DP_LINK_TRAIN_MASK_CPT;
1884		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
1885	} else {
1886		DP &= ~DP_LINK_TRAIN_MASK;
1887		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
1888	}
1889	POSTING_READ(intel_dp->output_reg);
1890
1891	drm_msleep(17, "915dlo");
1892
1893	if (is_edp(intel_dp)) {
1894		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
1895			DP |= DP_LINK_TRAIN_OFF_CPT;
1896		else
1897			DP |= DP_LINK_TRAIN_OFF;
1898	}
1899
1900
1901	if (!HAS_PCH_CPT(dev) &&
1902	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
1903		struct drm_crtc *crtc = intel_dp->base.base.crtc;
1904
1905		/* Hardware workaround: leaving our transcoder select
1906		 * set to transcoder B while it's off will prevent the
1907		 * corresponding HDMI output on transcoder A.
1908		 *
1909		 * Combine this with another hardware workaround:
1910		 * transcoder select bit can only be cleared while the
1911		 * port is enabled.
1912		 */
1913		DP &= ~DP_PIPEB_SELECT;
1914		I915_WRITE(intel_dp->output_reg, DP);
1915
1916		/* Changes to enable or select take place the vblank
1917		 * after being written.
1918		 */
1919		if (crtc == NULL) {
1920			/* We can arrive here never having been attached
1921			 * to a CRTC, for instance, due to inheriting
1922			 * random state from the BIOS.
1923			 *
1924			 * If the pipe is not running, play safe and
1925			 * wait for the clocks to stabilise before
1926			 * continuing.
1927			 */
1928			POSTING_READ(intel_dp->output_reg);
1929			drm_msleep(50, "915dla");
1930		} else
1931			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
1932	}
1933
1934	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
1935	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
1936	POSTING_READ(intel_dp->output_reg);
1937	drm_msleep(intel_dp->panel_power_down_delay, "915ldo");
1938}
1939
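/*
 * Fetch the first DP_RECEIVER_CAP_SIZE bytes of the DPCD (starting at
 * address 0x000).  A DPCD revision of zero is treated as "no sink".
 */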
1940static bool
1941intel_dp_get_dpcd(struct intel_dp *intel_dp)
1942{
1943	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
1944					   sizeof(intel_dp->dpcd)) &&
1945	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
1946		return true;
1947	}
1948
1949	return false;
1950}
1951
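/*
 * Purely informational: if the sink advertises OUI support, log the sink
 * and branch device OUIs.
 */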
1952static void
1953intel_dp_probe_oui(struct intel_dp *intel_dp)
1954{
1955	u8 buf[3];
1956
1957	if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
1958		return;
1959
1960	if (intel_dp_aux_native_read_retry(intel_dp, DP_SINK_OUI, buf, 3))
1961		DRM_DEBUG_KMS("Sink OUI: %02x%02x%02x\n",
1962			      buf[0], buf[1], buf[2]);
1963
1964	if (intel_dp_aux_native_read_retry(intel_dp, DP_BRANCH_OUI, buf, 3))
1965		DRM_DEBUG_KMS("Branch OUI: %02x%02x%02x\n",
1966			      buf[0], buf[1], buf[2]);
1967}
1968
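/*
 * Read the sink's device service IRQ vector so the cause of a hotplug
 * interrupt can be identified (automated test request, CP IRQ, ...).
 */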
1969static bool
1970intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
1971{
1972	int ret;
1973
1974	ret = intel_dp_aux_native_read_retry(intel_dp,
1975					     DP_DEVICE_SERVICE_IRQ_VECTOR,
1976					     sink_irq_vector, 1);
1977	if (!ret)
1978		return false;
1979
1980	return true;
1981}
1982
1983static void
1984intel_dp_handle_test_request(struct intel_dp *intel_dp)
1985{
1986	/* NAK by default */
1987	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_NAK);
1988}
1989
1990/*
1991 * According to DP spec
1992 * 5.1.2:
1993 *  1. Read DPCD
1994 *  2. Configure link according to Receiver Capabilities
1995 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
1996 *  4. Check link status on receipt of hot-plug interrupt
1997 */
1998
1999static void
2000intel_dp_check_link_status(struct intel_dp *intel_dp)
2001{
2002	u8 sink_irq_vector;
2003	u8 link_status[DP_LINK_STATUS_SIZE];
2004
2005	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
2006		return;
2007
2008	if (!intel_dp->base.base.crtc)
2009		return;
2010
2011	/* Try to read receiver status if the link appears to be up */
2012	if (!intel_dp_get_link_status(intel_dp, link_status)) {
2013		intel_dp_link_down(intel_dp);
2014		return;
2015	}
2016
2017	/* Now read the DPCD to see if it's actually running */
2018	if (!intel_dp_get_dpcd(intel_dp)) {
2019		intel_dp_link_down(intel_dp);
2020		return;
2021	}
2022
2023	/* Try to read the source of the interrupt */
2024	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
2025	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
2026		/* Clear interrupt source */
2027		intel_dp_aux_native_write_1(intel_dp,
2028					    DP_DEVICE_SERVICE_IRQ_VECTOR,
2029					    sink_irq_vector);
2030
2031		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
2032			intel_dp_handle_test_request(intel_dp);
2033		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
2034			DRM_DEBUG_KMS("CP or sink specific irq unhandled\n");
2035	}
2036
2037	if (!intel_channel_eq_ok(intel_dp, link_status)) {
2038		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
2039			      drm_get_encoder_name(&intel_dp->base.base));
2040		intel_dp_start_link_train(intel_dp);
2041		intel_dp_complete_link_train(intel_dp);
2042	}
2043}
2044
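/* A sink is reported as connected iff its DPCD can be read. */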
2045static enum drm_connector_status
2046intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2047{
2048	if (intel_dp_get_dpcd(intel_dp))
2049		return connector_status_connected;
2050	return connector_status_disconnected;
2051}
2052
2053static enum drm_connector_status
2054ironlake_dp_detect(struct intel_dp *intel_dp)
2055{
2056	enum drm_connector_status status;
2057
2058	/* Can't disconnect eDP, but you can close the lid... */
2059	if (is_edp(intel_dp)) {
2060		status = intel_panel_detect(intel_dp->base.base.dev);
2061		if (status == connector_status_unknown)
2062			status = connector_status_connected;
2063		return status;
2064	}
2065
2066	return intel_dp_detect_dpcd(intel_dp);
2067}
2068
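/*
 * On G4X the port's bit in PORT_HOTPLUG_STAT is checked first; the (slower)
 * DPCD read is only attempted when it indicates a connection.
 */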
2069static enum drm_connector_status
2070g4x_dp_detect(struct intel_dp *intel_dp)
2071{
2072	struct drm_device *dev = intel_dp->base.base.dev;
2073	struct drm_i915_private *dev_priv = dev->dev_private;
2074	uint32_t temp, bit;
2075
2076	switch (intel_dp->output_reg) {
2077	case DP_B:
2078		bit = DPB_HOTPLUG_INT_STATUS;
2079		break;
2080	case DP_C:
2081		bit = DPC_HOTPLUG_INT_STATUS;
2082		break;
2083	case DP_D:
2084		bit = DPD_HOTPLUG_INT_STATUS;
2085		break;
2086	default:
2087		return connector_status_unknown;
2088	}
2089
2090	temp = I915_READ(PORT_HOTPLUG_STAT);
2091
2092	if ((temp & bit) == 0)
2093		return connector_status_disconnected;
2094
2095	return intel_dp_detect_dpcd(intel_dp);
2096}
2097
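/*
 * EDID is fetched over the AUX/DDC channel; panel VDD is forced on around
 * the transfer so that an eDP panel can answer even while otherwise
 * powered down.
 */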
2098static struct edid *
2099intel_dp_get_edid(struct drm_connector *connector, device_t adapter)
2100{
2101	struct intel_dp *intel_dp = intel_attached_dp(connector);
2102	struct edid	*edid;
2103
2104	ironlake_edp_panel_vdd_on(intel_dp);
2105	edid = drm_get_edid(connector, adapter);
2106	ironlake_edp_panel_vdd_off(intel_dp, false);
2107	return edid;
2108}
2109
2110static int
2111intel_dp_get_edid_modes(struct drm_connector *connector, device_t adapter)
2112{
2113	struct intel_dp *intel_dp = intel_attached_dp(connector);
2114	int	ret;
2115
2116	ironlake_edp_panel_vdd_on(intel_dp);
2117	ret = intel_ddc_get_modes(connector, adapter);
2118	ironlake_edp_panel_vdd_off(intel_dp, false);
2119	return ret;
2120}
2121
2122
2123/**
2124 * Detect whether a DP/eDP sink is connected.  eDP panels report connected
2125 * unless the lid is closed; external ports are probed via the hotplug
2126 * status bits (G4X) and/or a DPCD read, and audio capability is refreshed
2127 * from the sink's EDID.
2128 */
2129static enum drm_connector_status
2130intel_dp_detect(struct drm_connector *connector, bool force)
2131{
2132	struct intel_dp *intel_dp = intel_attached_dp(connector);
2133	struct drm_device *dev = intel_dp->base.base.dev;
2134	enum drm_connector_status status;
2135	struct edid *edid = NULL;
2136
2137	intel_dp->has_audio = false;
2138
2139	if (HAS_PCH_SPLIT(dev))
2140		status = ironlake_dp_detect(intel_dp);
2141	else
2142		status = g4x_dp_detect(intel_dp);
2143	if (status != connector_status_connected)
2144		return status;
2145
2146	intel_dp_probe_oui(intel_dp);
2147
2148	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
2149		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
2150	} else {
2151		edid = intel_dp_get_edid(connector, intel_dp->adapter);
2152		if (edid) {
2153			intel_dp->has_audio = drm_detect_monitor_audio(edid);
2154			connector->display_info.raw_edid = NULL;
2155			free(edid, DRM_MEM_KMS);
2156		}
2157	}
2158
2159	return connector_status_connected;
2160}
2161
2162static int intel_dp_get_modes(struct drm_connector *connector)
2163{
2164	struct intel_dp *intel_dp = intel_attached_dp(connector);
2165	struct drm_device *dev = intel_dp->base.base.dev;
2166	struct drm_i915_private *dev_priv = dev->dev_private;
2167	int ret;
2168
2169	/* We should parse the EDID data and find out if it has an audio sink
2170	 */
2171
2172	ret = intel_dp_get_edid_modes(connector, intel_dp->adapter);
2173	if (ret) {
2174		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
2175			struct drm_display_mode *newmode;
2176			list_for_each_entry(newmode, &connector->probed_modes,
2177					    head) {
2178				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
2179					intel_dp->panel_fixed_mode =
2180						drm_mode_duplicate(dev, newmode);
2181					break;
2182				}
2183			}
2184		}
2185		return ret;
2186	}
2187
2188	/* if eDP has no EDID, try to use fixed panel mode from VBT */
2189	if (is_edp(intel_dp)) {
2190		/* initialize panel mode from VBT if available for eDP */
2191		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
2192			intel_dp->panel_fixed_mode =
2193				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
2194			if (intel_dp->panel_fixed_mode) {
2195				intel_dp->panel_fixed_mode->type |=
2196					DRM_MODE_TYPE_PREFERRED;
2197			}
2198		}
2199		if (intel_dp->panel_fixed_mode) {
2200			struct drm_display_mode *mode;
2201			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
2202			drm_mode_probed_add(connector, mode);
2203			return 1;
2204		}
2205	}
2206	return 0;
2207}
2208
2209static bool
2210intel_dp_detect_audio(struct drm_connector *connector)
2211{
2212	struct intel_dp *intel_dp = intel_attached_dp(connector);
2213	struct edid *edid;
2214	bool has_audio = false;
2215
2216	edid = intel_dp_get_edid(connector, intel_dp->adapter);
2217	if (edid) {
2218		has_audio = drm_detect_monitor_audio(edid);
2219
2220		connector->display_info.raw_edid = NULL;
2221		free(edid, DRM_MEM_KMS);
2222	}
2223
2224	return has_audio;
2225}
2226
2227static int
2228intel_dp_set_property(struct drm_connector *connector,
2229		      struct drm_property *property,
2230		      uint64_t val)
2231{
2232	struct drm_i915_private *dev_priv = connector->dev->dev_private;
2233	struct intel_dp *intel_dp = intel_attached_dp(connector);
2234	int ret;
2235
2236	ret = drm_connector_property_set_value(connector, property, val);
2237	if (ret)
2238		return ret;
2239
2240	if (property == dev_priv->force_audio_property) {
2241		int i = val;
2242		bool has_audio;
2243
2244		if (i == intel_dp->force_audio)
2245			return 0;
2246
2247		intel_dp->force_audio = i;
2248
2249		if (i == HDMI_AUDIO_AUTO)
2250			has_audio = intel_dp_detect_audio(connector);
2251		else
2252			has_audio = (i == HDMI_AUDIO_ON);
2253
2254		if (has_audio == intel_dp->has_audio)
2255			return 0;
2256
2257		intel_dp->has_audio = has_audio;
2258		goto done;
2259	}
2260
2261	if (property == dev_priv->broadcast_rgb_property) {
2262		if (val == !!intel_dp->color_range)
2263			return 0;
2264
2265		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
2266		goto done;
2267	}
2268
2269	return -EINVAL;
2270
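/*
 * A user-visible setting changed: if the connector is active, run a full
 * mode set on its CRTC so the new setting takes effect.
 */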
2271done:
2272	if (intel_dp->base.base.crtc) {
2273		struct drm_crtc *crtc = intel_dp->base.base.crtc;
2274		drm_crtc_helper_set_mode(crtc, &crtc->mode,
2275					 crtc->x, crtc->y,
2276					 crtc->fb);
2277	}
2278
2279	return 0;
2280}
2281
2282static void
2283intel_dp_destroy(struct drm_connector *connector)
2284{
2285	struct drm_device *dev = connector->dev;
2286
2287	if (intel_dpd_is_edp(dev))
2288		intel_panel_destroy_backlight(dev);
2289
2290#if 0
2291	drm_sysfs_connector_remove(connector);
2292#endif
2293	drm_connector_cleanup(connector);
2294	free(connector, DRM_MEM_KMS);
2295}
2296
2297static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
2298{
2299	struct drm_device *dev;
2300	struct intel_dp *intel_dp;
2301
2302	intel_dp = enc_to_intel_dp(encoder);
2303	dev = encoder->dev;
2304
2305	if (intel_dp->dp_iic_bus != NULL) {
2306		if (intel_dp->adapter != NULL) {
2307			device_delete_child(intel_dp->dp_iic_bus,
2308			    intel_dp->adapter);
2309		}
2310		device_delete_child(dev->device, intel_dp->dp_iic_bus);
2311	}
2312	drm_encoder_cleanup(encoder);
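	/* For eDP, cancel and drain the deferred VDD-off task and drop
	 * panel VDD synchronously before the encoder is freed.
	 */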
2313	if (is_edp(intel_dp)) {
2314		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;
2315
2316		taskqueue_cancel_timeout(dev_priv->tq,
2317		    &intel_dp->panel_vdd_task, NULL);
2318		taskqueue_drain_timeout(dev_priv->tq,
2319		    &intel_dp->panel_vdd_task);
2320		ironlake_panel_vdd_off_sync(intel_dp);
2321	}
2322	free(intel_dp, DRM_MEM_KMS);
2323}
2324
2325static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
2326	.dpms = intel_dp_dpms,
2327	.mode_fixup = intel_dp_mode_fixup,
2328	.prepare = intel_dp_prepare,
2329	.mode_set = intel_dp_mode_set,
2330	.commit = intel_dp_commit,
2331};
2332
2333static const struct drm_connector_funcs intel_dp_connector_funcs = {
2334	.dpms = drm_helper_connector_dpms,
2335	.detect = intel_dp_detect,
2336	.fill_modes = drm_helper_probe_single_connector_modes,
2337	.set_property = intel_dp_set_property,
2338	.destroy = intel_dp_destroy,
2339};
2340
2341static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
2342	.get_modes = intel_dp_get_modes,
2343	.mode_valid = intel_dp_mode_valid,
2344	.best_encoder = intel_best_encoder,
2345};
2346
2347static const struct drm_encoder_funcs intel_dp_enc_funcs = {
2348	.destroy = intel_dp_encoder_destroy,
2349};
2350
2351static void
2352intel_dp_hot_plug(struct intel_encoder *intel_encoder)
2353{
2354	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
2355
2356	intel_dp_check_link_status(intel_dp);
2357}
2358
2359/* Return which DP Port should be selected for Transcoder DP control */
2360int
2361intel_trans_dp_port_sel(struct drm_crtc *crtc)
2362{
2363	struct drm_device *dev = crtc->dev;
2364	struct drm_mode_config *mode_config = &dev->mode_config;
2365	struct drm_encoder *encoder;
2366
2367	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
2368		struct intel_dp *intel_dp;
2369
2370		if (encoder->crtc != crtc)
2371			continue;
2372
2373		intel_dp = enc_to_intel_dp(encoder);
2374		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
2375		    intel_dp->base.type == INTEL_OUTPUT_EDP)
2376			return intel_dp->output_reg;
2377	}
2378
2379	return -1;
2380}
2381
2382/* check the VBT to see whether the eDP is on DP-D port */
2383bool intel_dpd_is_edp(struct drm_device *dev)
2384{
2385	struct drm_i915_private *dev_priv = dev->dev_private;
2386	struct child_device_config *p_child;
2387	int i;
2388
2389	if (!dev_priv->child_dev_num)
2390		return false;
2391
2392	for (i = 0; i < dev_priv->child_dev_num; i++) {
2393		p_child = dev_priv->child_dev + i;
2394
2395		if (p_child->dvo_port == PORT_IDPD &&
2396		    p_child->device_type == DEVICE_TYPE_eDP)
2397			return true;
2398	}
2399	return false;
2400}
2401
2402static void
2403intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
2404{
2405	intel_attach_force_audio_property(connector);
2406	intel_attach_broadcast_rgb_property(connector);
2407}
2408
2409void
2410intel_dp_init(struct drm_device *dev, int output_reg)
2411{
2412	struct drm_i915_private *dev_priv = dev->dev_private;
2413	struct drm_connector *connector;
2414	struct intel_dp *intel_dp;
2415	struct intel_encoder *intel_encoder;
2416	struct intel_connector *intel_connector;
2417	const char *name = NULL;
2418	int type;
2419
2420	intel_dp = malloc(sizeof(struct intel_dp), DRM_MEM_KMS,
2421	    M_WAITOK | M_ZERO);
2422
2423	intel_dp->output_reg = output_reg;
2424	intel_dp->dpms_mode = -1;
2425
2426	intel_connector = malloc(sizeof(struct intel_connector), DRM_MEM_KMS,
2427	    M_WAITOK | M_ZERO);
2428	intel_encoder = &intel_dp->base;
2429
2430	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
2431		if (intel_dpd_is_edp(dev))
2432			intel_dp->is_pch_edp = true;
2433
2434	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
2435		type = DRM_MODE_CONNECTOR_eDP;
2436		intel_encoder->type = INTEL_OUTPUT_EDP;
2437	} else {
2438		type = DRM_MODE_CONNECTOR_DisplayPort;
2439		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
2440	}
2441
2442	connector = &intel_connector->base;
2443	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
2444	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);
2445
2446	connector->polled = DRM_CONNECTOR_POLL_HPD;
2447
2448	if (output_reg == DP_B || output_reg == PCH_DP_B)
2449		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
2450	else if (output_reg == DP_C || output_reg == PCH_DP_C)
2451		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
2452	else if (output_reg == DP_D || output_reg == PCH_DP_D)
2453		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
2454
2455	if (is_edp(intel_dp)) {
2456		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
2457		TIMEOUT_TASK_INIT(dev_priv->tq, &intel_dp->panel_vdd_task, 0,
2458		    ironlake_panel_vdd_work, intel_dp);
2459	}
2460
2461	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
2462
2463	connector->interlace_allowed = true;
2464	connector->doublescan_allowed = 0;
2465
2466	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
2467			 DRM_MODE_ENCODER_TMDS);
2468	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);
2469
2470	intel_connector_attach_encoder(intel_connector, intel_encoder);
2471#if 0
2472	drm_sysfs_connector_add(connector);
2473#endif
2474
2475	/* Set up the DDC bus. */
2476	switch (output_reg) {
2477		case DP_A:
2478			name = "DPDDC-A";
2479			break;
2480		case DP_B:
2481		case PCH_DP_B:
2482			dev_priv->hotplug_supported_mask |=
2483				HDMIB_HOTPLUG_INT_STATUS;
2484			name = "DPDDC-B";
2485			break;
2486		case DP_C:
2487		case PCH_DP_C:
2488			dev_priv->hotplug_supported_mask |=
2489				HDMIC_HOTPLUG_INT_STATUS;
2490			name = "DPDDC-C";
2491			break;
2492		case DP_D:
2493		case PCH_DP_D:
2494			dev_priv->hotplug_supported_mask |=
2495				HDMID_HOTPLUG_INT_STATUS;
2496			name = "DPDDC-D";
2497			break;
2498	}
2499
2500	/* Cache some DPCD data in the eDP case */
2501	if (is_edp(intel_dp)) {
2502		bool ret;
2503		struct edp_power_seq	cur, vbt;
2504		u32 pp_on, pp_off, pp_div;
2505
2506		pp_on = I915_READ(PCH_PP_ON_DELAYS);
2507		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
2508		pp_div = I915_READ(PCH_PP_DIVISOR);
2509
2510		if (!pp_on || !pp_off || !pp_div) {
2511			DRM_INFO("bad panel power sequencing delays, disabling panel\n");
2512			intel_dp_encoder_destroy(&intel_dp->base.base);
2513			intel_dp_destroy(&intel_connector->base);
2514			return;
2515		}
2516
2517		/* Pull timing values out of registers */
2518		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2519			PANEL_POWER_UP_DELAY_SHIFT;
2520
2521		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2522			PANEL_LIGHT_ON_DELAY_SHIFT;
2523
2524		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2525			PANEL_LIGHT_OFF_DELAY_SHIFT;
2526
2527		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2528			PANEL_POWER_DOWN_DELAY_SHIFT;
2529
2530		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2531			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
2532
2533		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2534			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
2535
2536		vbt = dev_priv->edp.pps;
2537
2538		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2539			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
2540
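		/* The delays gathered above are in units of 100 us;
		 * get_delay() takes the larger of the register and VBT
		 * values and rounds up to milliseconds.
		 */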
2541#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)
2542
2543		intel_dp->panel_power_up_delay = get_delay(t1_t3);
2544		intel_dp->backlight_on_delay = get_delay(t8);
2545		intel_dp->backlight_off_delay = get_delay(t9);
2546		intel_dp->panel_power_down_delay = get_delay(t10);
2547		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
2548
2549		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2550			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2551			      intel_dp->panel_power_cycle_delay);
2552
2553		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2554			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2555
2556		ironlake_edp_panel_vdd_on(intel_dp);
2557		ret = intel_dp_get_dpcd(intel_dp);
2558		ironlake_edp_panel_vdd_off(intel_dp, false);
2559
2560		if (ret) {
2561			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
2562				dev_priv->no_aux_handshake =
2563					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
2564					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
2565		} else {
2566			/* if this fails, presume the device is a ghost */
2567			DRM_INFO("failed to retrieve link info, disabling eDP\n");
2568			intel_dp_encoder_destroy(&intel_dp->base.base);
2569			intel_dp_destroy(&intel_connector->base);
2570			return;
2571		}
2572	}
2573
2574	intel_dp_i2c_init(intel_dp, intel_connector, name);
2575
2576	intel_encoder->hot_plug = intel_dp_hot_plug;
2577
2578	if (is_edp(intel_dp)) {
2579		dev_priv->int_edp_connector = connector;
2580		intel_panel_setup_backlight(dev);
2581	}
2582
2583	intel_dp_add_properties(intel_dp, connector);
2584
2585	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
2586	 * 0xd.  Failure to do so will result in spurious interrupts being
2587	 * generated on the port when a cable is not attached.
2588	 */
2589	if (IS_G4X(dev) && !IS_GM45(dev)) {
2590		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
2591		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
2592	}
2593}
2594