1// SPDX-License-Identifier: GPL-2.0-only
2/*
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4 */
5
6#define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7
8#include <linux/delay.h>
9#include <linux/iopoll.h>
10#include <linux/platform_device.h>
11#include <linux/rational.h>
12#include <drm/display/drm_dp_helper.h>
13#include <drm/drm_print.h>
14
15#include "dp_catalog.h"
16#include "dp_reg.h"
17
18#define POLLING_SLEEP_US			1000
19#define POLLING_TIMEOUT_US			10000
20
21#define SCRAMBLER_RESET_COUNT_VALUE		0xFC
22
23#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
24#define DP_INTERRUPT_STATUS_MASK_SHIFT	2
25
26#define DP_INTF_CONFIG_DATABUS_WIDEN     BIT(4)
27
28#define DP_INTERRUPT_STATUS1 \
29	(DP_INTR_AUX_XFER_DONE| \
30	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
31	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
32	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
33	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
34
35#define DP_INTERRUPT_STATUS1_ACK \
36	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
37#define DP_INTERRUPT_STATUS1_MASK \
38	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
39
40#define DP_INTERRUPT_STATUS2 \
41	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
42	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
43
44#define DP_INTERRUPT_STATUS2_ACK \
45	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46#define DP_INTERRUPT_STATUS2_MASK \
47	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48
49#define DP_INTERRUPT_STATUS4 \
50	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
51	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)
52
53#define DP_INTERRUPT_MASK4 \
54	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
55	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
56
57#define DP_DEFAULT_AHB_OFFSET	0x0000
58#define DP_DEFAULT_AHB_SIZE	0x0200
59#define DP_DEFAULT_AUX_OFFSET	0x0200
60#define DP_DEFAULT_AUX_SIZE	0x0200
61#define DP_DEFAULT_LINK_OFFSET	0x0400
62#define DP_DEFAULT_LINK_SIZE	0x0C00
63#define DP_DEFAULT_P0_OFFSET	0x1000
64#define DP_DEFAULT_P0_SIZE	0x0400
65
/* One mapped DP register sub-region: iomapped base address plus length. */
struct dss_io_region {
	size_t len;		/* size of the mapping, in bytes */
	void __iomem *base;	/* kernel virtual address of the region */
};
70
/* The four register sub-regions that make up the DP controller block. */
struct dss_io_data {
	struct dss_io_region ahb;	/* controller regs (interrupts, HW version, resets) */
	struct dss_io_region aux;	/* AUX channel and HPD registers */
	struct dss_io_region link;	/* mainlink registers */
	struct dss_io_region p0;	/* pixel/interface (P0) registers */
};
77
/* Private catalog state wrapping the public struct dp_catalog. */
struct dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;	/* used for drm_dbg_dp() logging */
	struct dss_io_data io;		/* mapped register regions */
	/* audio SDP header words; exact layout defined elsewhere — TODO confirm */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public part, embedded; container_of() target */
};
85
86void dp_catalog_snapshot(struct dp_catalog *dp_catalog, struct msm_disp_state *disp_state)
87{
88	struct dp_catalog_private *catalog = container_of(dp_catalog,
89			struct dp_catalog_private, dp_catalog);
90	struct dss_io_data *dss = &catalog->io;
91
92	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
93	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
94	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
95	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
96}
97
98static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
99{
100	return readl_relaxed(catalog->io.aux.base + offset);
101}
102
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed().
	 */
	writel(data, catalog->io.aux.base + offset);
}
112
113static inline u32 dp_read_ahb(const struct dp_catalog_private *catalog, u32 offset)
114{
115	return readl_relaxed(catalog->io.ahb.base + offset);
116}
117
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure AHB reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed().
	 */
	writel(data, catalog->io.ahb.base + offset);
}
127
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed().
	 */
	writel(data, catalog->io.p0.base + offset);
}
137
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	/* Relaxed read of a P0 (interface) register at byte @offset. */
	return readl_relaxed(catalog->io.p0.base + offset);
}
147
148static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
149{
150	return readl_relaxed(catalog->io.link.base + offset);
151}
152
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed().
	 */
	writel(data, catalog->io.link.base + offset);
}
162
163/* aux related catalog functions */
164u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
165{
166	struct dp_catalog_private *catalog = container_of(dp_catalog,
167				struct dp_catalog_private, dp_catalog);
168
169	return dp_read_aux(catalog, REG_DP_AUX_DATA);
170}
171
172int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog, u32 data)
173{
174	struct dp_catalog_private *catalog = container_of(dp_catalog,
175				struct dp_catalog_private, dp_catalog);
176
177	dp_write_aux(catalog, REG_DP_AUX_DATA, data);
178	return 0;
179}
180
181int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog, u32 data)
182{
183	struct dp_catalog_private *catalog = container_of(dp_catalog,
184				struct dp_catalog_private, dp_catalog);
185
186	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
187	return 0;
188}
189
190int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
191{
192	u32 data;
193	struct dp_catalog_private *catalog = container_of(dp_catalog,
194				struct dp_catalog_private, dp_catalog);
195
196	if (read) {
197		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
198		data &= ~DP_AUX_TRANS_CTRL_GO;
199		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
200	} else {
201		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
202	}
203	return 0;
204}
205
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Read the latched PHY AUX status, then pulse the clear register. */
	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	/*
	 * NOTE(review): the 0x1f / 0x9f / 0 clear sequence follows a h/w
	 * programming sequence whose magic values are not derivable from this
	 * file — confirm against the PHY documentation before changing.
	 */
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
217
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the AUX controller by pulsing the RESET bit in
 * the AUX control register.
 *
 * NOTE: resetting the AUX controller will also clear any pending HPD
 * related interrupts
 *
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* Assert reset, hold for the recommended time, then de-assert. */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
245
246void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
247{
248	u32 aux_ctrl;
249	struct dp_catalog_private *catalog = container_of(dp_catalog,
250				struct dp_catalog_private, dp_catalog);
251
252	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
253
254	if (enable) {
255		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
256		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
257		aux_ctrl |= DP_AUX_CTRL_ENABLE;
258	} else {
259		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
260	}
261
262	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
263}
264
int dp_catalog_aux_wait_for_hpd_connect_state(struct dp_catalog *dp_catalog,
					      unsigned long wait_us)
{
	u32 state;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/*
	 * Poll the HPD interrupt status register for the "connected" state,
	 * sampling every 2 ms (or every wait_us if that is smaller) and
	 * giving up after wait_us microseconds in total.
	 * Returns 0 on success, -ETIMEDOUT otherwise.
	 */
	return readl_poll_timeout(catalog->io.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				min(wait_us, 2000), wait_us);
}
278
279static void dump_regs(void __iomem *base, int len)
280{
281	int i;
282	u32 x0, x4, x8, xc;
283	u32 addr_off = 0;
284
285	len = DIV_ROUND_UP(len, 16);
286	for (i = 0; i < len; i++) {
287		x0 = readl_relaxed(base + addr_off);
288		x4 = readl_relaxed(base + addr_off + 0x04);
289		x8 = readl_relaxed(base + addr_off + 0x08);
290		xc = readl_relaxed(base + addr_off + 0x0c);
291
292		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
293		addr_off += 16;
294	}
295}
296
297void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
298{
299	struct dp_catalog_private *catalog = container_of(dp_catalog,
300		struct dp_catalog_private, dp_catalog);
301	struct dss_io_data *io = &catalog->io;
302
303	pr_info("AHB regs\n");
304	dump_regs(io->ahb.base, io->ahb.len);
305
306	pr_info("AUXCLK regs\n");
307	dump_regs(io->aux.base, io->aux.len);
308
309	pr_info("LCLK regs\n");
310	dump_regs(io->link.base, io->link.len);
311
312	pr_info("P0CLK regs\n");
313	dump_regs(io->p0.base, io->p0.len);
314}
315
u32 dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	/* Strip the enable/mask bits; keep only the raw status bits. */
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	/* Ack bits sit one position to the left of their status bits. */
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	/* Ack the pending interrupts while keeping them enabled. */
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
332
333/* controller related catalog functions */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Program the transfer-unit size and the valid symbol boundaries. */
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
345
346void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
347{
348	struct dp_catalog_private *catalog = container_of(dp_catalog,
349				struct dp_catalog_private, dp_catalog);
350
351	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
352}
353
354void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
355{
356	struct dp_catalog_private *catalog = container_of(dp_catalog,
357				struct dp_catalog_private, dp_catalog);
358
359	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);
360
361	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
362}
363
364void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
365{
366	struct dp_catalog_private *catalog = container_of(dp_catalog,
367				struct dp_catalog_private, dp_catalog);
368	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
369	u32 ln_mapping;
370
371	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
372	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
373	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
374	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
375
376	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
377			ln_mapping);
378}
379
380void dp_catalog_ctrl_psr_mainlink_enable(struct dp_catalog *dp_catalog,
381						bool enable)
382{
383	u32 val;
384	struct dp_catalog_private *catalog = container_of(dp_catalog,
385				struct dp_catalog_private, dp_catalog);
386
387	val = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
388
389	if (enable)
390		val |= DP_MAINLINK_CTRL_ENABLE;
391	else
392		val &= ~DP_MAINLINK_CTRL_ENABLE;
393
394	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
395}
396
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happen before other operations,
		 * dp_write_link() uses writel(). The step order below is a
		 * hardware sequence; do not reorder the writes.
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* Step 1: disable the link and clear any pending reset. */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* Step 2: pulse the reset bit while the link is disabled. */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* Step 3: re-enable the link with framing boundary select. */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		/* Disable: clear only the enable bit. */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
431
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	/*
	 * NOTE(review): only the bpp field is cleared before OR-ing; stale
	 * colorimetry bits from a previous mode would survive here — confirm
	 * callers always start from a reset MISC0 or clear that field too.
	 */
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
452
453void dp_catalog_setup_peripheral_flush(struct dp_catalog *dp_catalog)
454{
455	u32 mainlink_ctrl, hw_revision;
456	struct dp_catalog_private *catalog = container_of(dp_catalog,
457				struct dp_catalog_private, dp_catalog);
458
459	mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
460
461	hw_revision = dp_catalog_hw_revision(dp_catalog);
462	if (hw_revision >= DP_HW_VERSION_1_2)
463		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
464	else
465		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;
466
467	dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
468}
469
470void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
471					u32 rate, u32 stream_rate_khz,
472					bool is_ycbcr_420)
473{
474	u32 pixel_m, pixel_n;
475	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
476	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
477	u32 const link_rate_hbr2 = 540000;
478	u32 const link_rate_hbr3 = 810000;
479	unsigned long den, num;
480
481	struct dp_catalog_private *catalog = container_of(dp_catalog,
482				struct dp_catalog_private, dp_catalog);
483
484	if (rate == link_rate_hbr3)
485		pixel_div = 6;
486	else if (rate == 162000 || rate == 270000)
487		pixel_div = 2;
488	else if (rate == link_rate_hbr2)
489		pixel_div = 4;
490	else
491		DRM_ERROR("Invalid pixel mux divider\n");
492
493	dispcc_input_rate = (rate * 10) / pixel_div;
494
495	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
496			(unsigned long)(1 << 16) - 1,
497			(unsigned long)(1 << 16) - 1, &den, &num);
498
499	den = ~(den - num);
500	den = den & 0xFFFF;
501	pixel_m = num;
502	pixel_n = den;
503
504	mvid = (pixel_m & 0xFFFF) * 5;
505	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
506
507	if (nvid < nvid_fixed) {
508		u32 temp;
509
510		temp = (nvid_fixed / nvid) * nvid;
511		mvid = (nvid_fixed / nvid) * mvid;
512		nvid = temp;
513	}
514
515	if (is_ycbcr_420)
516		mvid /= 2;
517
518	if (link_rate_hbr2 == rate)
519		nvid *= 2;
520
521	if (link_rate_hbr3 == rate)
522		nvid *= 3;
523
524	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
525	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
526	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
527	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
528}
529
530int dp_catalog_ctrl_set_pattern_state_bit(struct dp_catalog *dp_catalog,
531					u32 state_bit)
532{
533	int bit, ret;
534	u32 data;
535	struct dp_catalog_private *catalog = container_of(dp_catalog,
536				struct dp_catalog_private, dp_catalog);
537
538	bit = BIT(state_bit - 1);
539	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
540	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
541
542	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
543
544	/* Poll for mainlink ready status */
545	ret = readx_poll_timeout(readl, catalog->io.link.base +
546					REG_DP_MAINLINK_READY,
547					data, data & bit,
548					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
549	if (ret < 0) {
550		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
551		return ret;
552	}
553	return 0;
554}
555
556/**
557 * dp_catalog_hw_revision() - retrieve DP hw revision
558 *
559 * @dp_catalog: DP catalog structure
560 *
561 * Return: DP controller hw revision
562 *
563 */
564u32 dp_catalog_hw_revision(const struct dp_catalog *dp_catalog)
565{
566	const struct dp_catalog_private *catalog = container_of(dp_catalog,
567				struct dp_catalog_private, dp_catalog);
568
569	return dp_read_ahb(catalog, REG_DP_HW_VERSION);
570}
571
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the DP controller by pulsing the SW_RESET bit.
 *
 * NOTE: resetting the DP controller will also clear any pending HPD
 * related interrupts
 *
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* Assert reset, hold for the recommended time, then de-assert. */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
599
600bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
601{
602	u32 data;
603	int ret;
604	struct dp_catalog_private *catalog = container_of(dp_catalog,
605				struct dp_catalog_private, dp_catalog);
606
607	/* Poll for mainlink ready status */
608	ret = readl_poll_timeout(catalog->io.link.base +
609				REG_DP_MAINLINK_READY,
610				data, data & DP_MAINLINK_READY_FOR_VIDEO,
611				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
612	if (ret < 0) {
613		DRM_ERROR("mainlink not ready\n");
614		return false;
615	}
616
617	return true;
618}
619
620void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
621						bool enable)
622{
623	struct dp_catalog_private *catalog = container_of(dp_catalog,
624				struct dp_catalog_private, dp_catalog);
625
626	if (enable) {
627		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
628				DP_INTERRUPT_STATUS1_MASK);
629		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
630				DP_INTERRUPT_STATUS2_MASK);
631	} else {
632		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
633		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
634	}
635}
636
637void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
638			u32 intr_mask, bool en)
639{
640	struct dp_catalog_private *catalog = container_of(dp_catalog,
641				struct dp_catalog_private, dp_catalog);
642
643	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
644
645	config = (en ? config | intr_mask : config & ~intr_mask);
646
647	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
648					intr_mask, config);
649	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
650				config & DP_DP_HPD_INT_MASK);
651}
652
653void dp_catalog_ctrl_hpd_enable(struct dp_catalog *dp_catalog)
654{
655	struct dp_catalog_private *catalog = container_of(dp_catalog,
656				struct dp_catalog_private, dp_catalog);
657
658	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
659
660	/* Configure REFTIMER and enable it */
661	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
662	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
663
664	/* Enable HPD */
665	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
666}
667
668void dp_catalog_ctrl_hpd_disable(struct dp_catalog *dp_catalog)
669{
670	struct dp_catalog_private *catalog = container_of(dp_catalog,
671				struct dp_catalog_private, dp_catalog);
672
673	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
674
675	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
676	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
677
678	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
679}
680
static void dp_catalog_enable_sdp(struct dp_catalog_private *catalog)
{
	/* trigger sdp: pulse UPDATE_SDP so the h/w latches new SDP config */
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
	dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
}
687
688void dp_catalog_ctrl_config_psr(struct dp_catalog *dp_catalog)
689{
690	struct dp_catalog_private *catalog = container_of(dp_catalog,
691				struct dp_catalog_private, dp_catalog);
692	u32 config;
693
694	/* enable PSR1 function */
695	config = dp_read_link(catalog, REG_PSR_CONFIG);
696	config |= PSR1_SUPPORTED;
697	dp_write_link(catalog, REG_PSR_CONFIG, config);
698
699	dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
700	dp_catalog_enable_sdp(catalog);
701}
702
703void dp_catalog_ctrl_set_psr(struct dp_catalog *dp_catalog, bool enter)
704{
705	struct dp_catalog_private *catalog = container_of(dp_catalog,
706			struct dp_catalog_private, dp_catalog);
707	u32 cmd;
708
709	cmd = dp_read_link(catalog, REG_PSR_CMD);
710
711	cmd &= ~(PSR_ENTER | PSR_EXIT);
712
713	if (enter)
714		cmd |= PSR_ENTER;
715	else
716		cmd |= PSR_EXIT;
717
718	dp_catalog_enable_sdp(catalog);
719	dp_write_link(catalog, REG_PSR_CMD, cmd);
720}
721
722u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
723{
724	struct dp_catalog_private *catalog = container_of(dp_catalog,
725				struct dp_catalog_private, dp_catalog);
726	u32 status;
727
728	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
729	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
730	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
731	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
732
733	return status;
734}
735
736u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
737{
738	struct dp_catalog_private *catalog = container_of(dp_catalog,
739				struct dp_catalog_private, dp_catalog);
740	int isr, mask;
741
742	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
743	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
744				 (isr & DP_DP_HPD_INT_MASK));
745	mask = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
746
747	/*
748	 * We only want to return interrupts that are unmasked to the caller.
749	 * However, the interrupt status field also contains other
750	 * informational bits about the HPD state status, so we only mask
751	 * out the part of the register that tells us about which interrupts
752	 * are pending.
753	 */
754	return isr & (mask | ~DP_DP_HPD_INT_MASK);
755}
756
757u32 dp_catalog_ctrl_read_psr_interrupt_status(struct dp_catalog *dp_catalog)
758{
759	struct dp_catalog_private *catalog = container_of(dp_catalog,
760				struct dp_catalog_private, dp_catalog);
761	u32 intr, intr_ack;
762
763	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
764	intr_ack = (intr & DP_INTERRUPT_STATUS4)
765			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
766	dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);
767
768	return intr;
769}
770
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	/* Strip the enable/mask bits; keep only the raw status bits. */
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	/* Ack bits sit one position to the left of their status bits. */
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	/* Ack the pending interrupts while keeping them enabled. */
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
786
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Assert PHY and PLL software reset, hold, then release both. */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
797
/* Start transmission of the requested PHY compliance test pattern. */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is transmitted as training pattern 1. */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/*
		 * NOTE(review): value is 0 here, so clearing bit 16 is a
		 * no-op; presumably kept to mirror the CP2520 sequence below
		 * — confirm before removing.
		 */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* Run CP2520 with the scrambler bypass disabled. */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* Finally re-enable the mainlink. */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
				"No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
874
875u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
876{
877	struct dp_catalog_private *catalog = container_of(dp_catalog,
878				struct dp_catalog_private, dp_catalog);
879
880	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
881}
882
883/* panel related catalog functions */
884int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog, u32 total,
885				u32 sync_start, u32 width_blanking, u32 dp_active)
886{
887	struct dp_catalog_private *catalog = container_of(dp_catalog,
888				struct dp_catalog_private, dp_catalog);
889	u32 reg;
890
891	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER, total);
892	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC, sync_start);
893	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY, width_blanking);
894	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_active);
895
896	reg = dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);
897
898	if (dp_catalog->wide_bus_en)
899		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
900	else
901		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;
902
903
904	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", dp_catalog->wide_bus_en, reg);
905
906	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);
907	return 0;
908}
909
static void dp_catalog_panel_send_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
{
	struct dp_catalog_private *catalog;
	u32 header[2];
	u32 val;
	int i;

	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);

	/* Pack the SDP header into two 32-bit register words. */
	dp_utils_pack_sdp_header(&vsc_sdp->sdp_header, header);

	dp_write_link(catalog, MMSS_DP_GENERIC0_0, header[0]);
	dp_write_link(catalog, MMSS_DP_GENERIC0_1, header[1]);

	/*
	 * Copy the payload little-endian, four bytes per register, into the
	 * consecutive GENERIC0 data registers starting at GENERIC0_2
	 * (register byte offsets advance in lock-step with i).
	 */
	for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
		val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
		       (vsc_sdp->db[i + 3] << 24));
		dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, val);
	}
}
930
static void dp_catalog_panel_update_sdp(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 hw_revision;

	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);

	/*
	 * Pulse SDP_CFG3 to flush the updated SDP configuration, but only on
	 * controllers v1.0 .. v1.1; presumably newer hardware latches SDP
	 * updates on its own — confirm against the h/w programming guide.
	 */
	hw_revision = dp_catalog_hw_revision(dp_catalog);
	if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
		dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x01);
		dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x00);
	}
}
944
945void dp_catalog_panel_enable_vsc_sdp(struct dp_catalog *dp_catalog, struct dp_sdp *vsc_sdp)
946{
947	struct dp_catalog_private *catalog;
948	u32 cfg, cfg2, misc;
949
950	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
951
952	cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
953	cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
954	misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);
955
956	cfg |= GEN0_SDP_EN;
957	dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
958
959	cfg2 |= GENERIC0_SDPSIZE_VALID;
960	dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
961
962	dp_catalog_panel_send_vsc_sdp(dp_catalog, vsc_sdp);
963
964	/* indicates presence of VSC (BIT(6) of MISC1) */
965	misc |= DP_MISC1_VSC_SDP;
966
967	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");
968
969	pr_debug("misc settings = 0x%x\n", misc);
970	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
971
972	dp_catalog_panel_update_sdp(dp_catalog);
973}
974
975void dp_catalog_panel_disable_vsc_sdp(struct dp_catalog *dp_catalog)
976{
977	struct dp_catalog_private *catalog;
978	u32 cfg, cfg2, misc;
979
980	catalog = container_of(dp_catalog, struct dp_catalog_private, dp_catalog);
981
982	cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
983	cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
984	misc = dp_read_link(catalog, REG_DP_MISC1_MISC0);
985
986	cfg &= ~GEN0_SDP_EN;
987	dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);
988
989	cfg2 &= ~GENERIC0_SDPSIZE_VALID;
990	dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);
991
992	/* switch back to MSA */
993	misc &= ~DP_MISC1_VSC_SDP;
994
995	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");
996
997	pr_debug("misc settings = 0x%x\n", misc);
998	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);
999
1000	dp_catalog_panel_update_sdp(dp_catalog);
1001}
1002
/*
 * Program the DP controller's internal test pattern generator (TPG) with
 * timings derived from @drm_mode and start it, bypassing the normal pixel
 * source. Used for hardware bring-up / debug; undone by
 * dp_catalog_panel_tpg_disable().
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters */
	hsync_period = drm_mode->htotal;	/* in pixels */
	vsync_period = drm_mode->vtotal;	/* in lines */

	/*
	 * Vertical active region expressed in pixel-clock units: line number
	 * times the hsync period. Start is at the end of the vertical
	 * back porch, end just before the vertical front porch begins.
	 */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift by the horizontal back/front porch within the first/last line */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	/* horizontal active region within a line, in pixels */
	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* HSYNC_CTL: [31:16] total period, [15:0] pulse width */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	/* DISPLAY_HCTL: [31:16] active end, [15:0] active start */
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	/* program timing engine; F1 (second field) registers are unused here */
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* select the pattern, then enable BIST and the timing engine last */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
1070
1071void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
1072{
1073	struct dp_catalog_private *catalog = container_of(dp_catalog,
1074				struct dp_catalog_private, dp_catalog);
1075
1076	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
1077	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
1078	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
1079}
1080
1081static void __iomem *dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
1082{
1083	struct resource *res;
1084	void __iomem *base;
1085
1086	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
1087	if (!IS_ERR(base))
1088		*len = resource_size(res);
1089
1090	return base;
1091}
1092
1093static int dp_catalog_get_io(struct dp_catalog_private *catalog)
1094{
1095	struct platform_device *pdev = to_platform_device(catalog->dev);
1096	struct dss_io_data *dss = &catalog->io;
1097
1098	dss->ahb.base = dp_ioremap(pdev, 0, &dss->ahb.len);
1099	if (IS_ERR(dss->ahb.base))
1100		return PTR_ERR(dss->ahb.base);
1101
1102	dss->aux.base = dp_ioremap(pdev, 1, &dss->aux.len);
1103	if (IS_ERR(dss->aux.base)) {
1104		/*
1105		 * The initial binding had a single reg, but in order to
1106		 * support variation in the sub-region sizes this was split.
1107		 * dp_ioremap() will fail with -EINVAL here if only a single
1108		 * reg is specified, so fill in the sub-region offsets and
1109		 * lengths based on this single region.
1110		 */
1111		if (PTR_ERR(dss->aux.base) == -EINVAL) {
1112			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
1113				DRM_ERROR("legacy memory region not large enough\n");
1114				return -EINVAL;
1115			}
1116
1117			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
1118			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
1119			dss->aux.len = DP_DEFAULT_AUX_SIZE;
1120			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
1121			dss->link.len = DP_DEFAULT_LINK_SIZE;
1122			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
1123			dss->p0.len = DP_DEFAULT_P0_SIZE;
1124		} else {
1125			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
1126			return PTR_ERR(dss->aux.base);
1127		}
1128	} else {
1129		dss->link.base = dp_ioremap(pdev, 2, &dss->link.len);
1130		if (IS_ERR(dss->link.base)) {
1131			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
1132			return PTR_ERR(dss->link.base);
1133		}
1134
1135		dss->p0.base = dp_ioremap(pdev, 3, &dss->p0.len);
1136		if (IS_ERR(dss->p0.base)) {
1137			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
1138			return PTR_ERR(dss->p0.base);
1139		}
1140	}
1141
1142	return 0;
1143}
1144
1145struct dp_catalog *dp_catalog_get(struct device *dev)
1146{
1147	struct dp_catalog_private *catalog;
1148	int ret;
1149
1150	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
1151	if (!catalog)
1152		return ERR_PTR(-ENOMEM);
1153
1154	catalog->dev = dev;
1155
1156	ret = dp_catalog_get_io(catalog);
1157	if (ret)
1158		return ERR_PTR(ret);
1159
1160	return &catalog->dp_catalog;
1161}
1162
1163u32 dp_catalog_audio_get_header(struct dp_catalog *dp_catalog,
1164				enum dp_catalog_audio_sdp_type sdp,
1165				enum dp_catalog_audio_header_type header)
1166{
1167	struct dp_catalog_private *catalog;
1168	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1169
1170	catalog = container_of(dp_catalog,
1171		struct dp_catalog_private, dp_catalog);
1172
1173	sdp_map = catalog->audio_map;
1174
1175	return dp_read_link(catalog, sdp_map[sdp][header]);
1176}
1177
1178void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog,
1179				 enum dp_catalog_audio_sdp_type sdp,
1180				 enum dp_catalog_audio_header_type header,
1181				 u32 data)
1182{
1183	struct dp_catalog_private *catalog;
1184	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
1185
1186	if (!dp_catalog)
1187		return;
1188
1189	catalog = container_of(dp_catalog,
1190		struct dp_catalog_private, dp_catalog);
1191
1192	sdp_map = catalog->audio_map;
1193
1194	dp_write_link(catalog, sdp_map[sdp][header], data);
1195}
1196
1197void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog, u32 select)
1198{
1199	struct dp_catalog_private *catalog;
1200	u32 acr_ctrl;
1201
1202	if (!dp_catalog)
1203		return;
1204
1205	catalog = container_of(dp_catalog,
1206		struct dp_catalog_private, dp_catalog);
1207
1208	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
1209
1210	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
1211					select, acr_ctrl);
1212
1213	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
1214}
1215
1216void dp_catalog_audio_enable(struct dp_catalog *dp_catalog, bool enable)
1217{
1218	struct dp_catalog_private *catalog;
1219	u32 audio_ctrl;
1220
1221	if (!dp_catalog)
1222		return;
1223
1224	catalog = container_of(dp_catalog,
1225		struct dp_catalog_private, dp_catalog);
1226
1227	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
1228
1229	if (enable)
1230		audio_ctrl |= BIT(0);
1231	else
1232		audio_ctrl &= ~BIT(0);
1233
1234	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);
1235
1236	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
1237	/* make sure audio engine is disabled */
1238	wmb();
1239}
1240
1241void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
1242{
1243	struct dp_catalog_private *catalog;
1244	u32 sdp_cfg = 0;
1245	u32 sdp_cfg2 = 0;
1246
1247	if (!dp_catalog)
1248		return;
1249
1250	catalog = container_of(dp_catalog,
1251		struct dp_catalog_private, dp_catalog);
1252
1253	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
1254	/* AUDIO_TIMESTAMP_SDP_EN */
1255	sdp_cfg |= BIT(1);
1256	/* AUDIO_STREAM_SDP_EN */
1257	sdp_cfg |= BIT(2);
1258	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
1259	sdp_cfg |= BIT(5);
1260	/* AUDIO_ISRC_SDP_EN  */
1261	sdp_cfg |= BIT(6);
1262	/* AUDIO_INFOFRAME_SDP_EN  */
1263	sdp_cfg |= BIT(20);
1264
1265	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);
1266
1267	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
1268
1269	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
1270	/* IFRM_REGSRC -> Do not use reg values */
1271	sdp_cfg2 &= ~BIT(0);
1272	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
1273	sdp_cfg2 &= ~BIT(1);
1274
1275	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);
1276
1277	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
1278}
1279
/*
 * Install the SDP-type x header-index -> register-offset lookup table
 * used by dp_catalog_audio_get_header()/dp_catalog_audio_set_header().
 * No-op when @dp_catalog is NULL.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	/*
	 * Rows are indexed by enum dp_catalog_audio_sdp_type, columns by
	 * enum dp_catalog_audio_header_type. The second and third columns
	 * of each row point at the same *_1 register — presumably two
	 * header bytes share that register; verify against the HW manual.
	 */
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1320
1321void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog, u32 safe_to_exit_level)
1322{
1323	struct dp_catalog_private *catalog;
1324	u32 mainlink_levels;
1325
1326	if (!dp_catalog)
1327		return;
1328
1329	catalog = container_of(dp_catalog,
1330		struct dp_catalog_private, dp_catalog);
1331
1332	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1333	mainlink_levels &= 0xFE0;
1334	mainlink_levels |= safe_to_exit_level;
1335
1336	drm_dbg_dp(catalog->drm_dev,
1337			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1338			 mainlink_levels, safe_to_exit_level);
1339
1340	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1341}
1342