/*
 *  linux/arch/arm/mach-pnx4008/dma.c
 *
 *  PNX4008 DMA registration and IRQ dispatching
 *
 *  Author:	Vitaly Wool
 *  Copyright:	MontaVista Software Inc. (c) 2005
 *
 *  Based on the code from Nicolas Pitre
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License version 2 as
 *  published by the Free Software Foundation.
 */

#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/errno.h>
#include <linux/err.h>
#include <linux/dma-mapping.h>
#include <linux/clk.h>

#include <asm/system.h>
#include <asm/hardware.h>
#include <asm/dma.h>
#include <asm/dma-mapping.h>
#include <asm/io.h>
#include <asm/mach/dma.h>
#include <asm/arch/clock.h>

static struct dma_channel {
	char *name;
	void (*irq_handler) (int, int, void *);
	void *data;
	struct pnx4008_dma_ll *ll;
	u32 ll_dma;
	void *target_addr;
	int target_id;
} dma_channels[MAX_DMA_CHANNELS];

static struct ll_pool {
	void *vaddr;
	void *cur;
	dma_addr_t dma_addr;
	int count;
} ll_pool;

static DEFINE_SPINLOCK(ll_lock);

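/*
 * Allocate one linked-list descriptor from the coherent pool set up in
 * pnx4008_dma_init().  The pool keeps a small reserve, so allocation
 * fails (returns NULL) when only a few entries are left.  On success
 * the entry is zeroed and its bus address is returned through *ll_dma.
 */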
struct pnx4008_dma_ll *pnx4008_alloc_ll_entry(dma_addr_t * ll_dma)
{
	struct pnx4008_dma_ll *ll = NULL;
	unsigned long flags;

	spin_lock_irqsave(&ll_lock, flags);
	if (ll_pool.count > 4) { /* can give one more */
		ll = *(struct pnx4008_dma_ll **) ll_pool.cur;
		*ll_dma = ll_pool.dma_addr + ((void *)ll - ll_pool.vaddr);
		*(void **)ll_pool.cur = **(void ***)ll_pool.cur;
		memset(ll, 0, sizeof(*ll));
		ll_pool.count--;
	}
	spin_unlock_irqrestore(&ll_lock, flags);

	return ll;
}

EXPORT_SYMBOL_GPL(pnx4008_alloc_ll_entry);

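/*
 * Return a single descriptor to the pool.  If the entry carries a buffer
 * allocated on its behalf (DMA_BUFFER_ALLOCATED), the associated free
 * callback is invoked first.  Passing a pointer that lies outside the
 * descriptor pool is a fatal error (BUG).
 */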
void pnx4008_free_ll_entry(struct pnx4008_dma_ll * ll, dma_addr_t ll_dma)
{
	unsigned long flags;

	if (ll) {
		if ((unsigned long)((long)ll - (long)ll_pool.vaddr) > 0x4000) {
			printk(KERN_ERR "Trying to free entry not allocated by DMA\n");
			BUG();
		}

		if (ll->flags & DMA_BUFFER_ALLOCATED)
			ll->free(ll->alloc_data);

		spin_lock_irqsave(&ll_lock, flags);
		*(long *)ll = *(long *)ll_pool.cur;
		*(long *)ll_pool.cur = (long)ll;
		ll_pool.count++;
		spin_unlock_irqrestore(&ll_lock, flags);
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll_entry);

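/*
 * Free an entire chain of linked-list descriptors, following the ->next
 * pointers until the end of the list.
 */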
void pnx4008_free_ll(u32 ll_dma, struct pnx4008_dma_ll * ll)
{
	struct pnx4008_dma_ll *ptr;
	u32 dma;

	while (ll) {
		dma = ll->next_dma;
		ptr = ll->next;
		pnx4008_free_ll_entry(ll, ll_dma);

		ll_dma = dma;
		ll = ptr;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_free_ll);

static int dma_channels_requested = 0;

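/*
 * The "dma_ck" clock and the controller enable bit are switched on when
 * the first channel is requested and switched back off when the last
 * channel is freed.
 */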
static inline void dma_increment_usage(void)
{
	if (!dma_channels_requested++) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 1);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 1);
	}
}

static inline void dma_decrement_usage(void)
{
	if (!--dma_channels_requested) {
		struct clk *clk = clk_get(0, "dma_ck");
		if (!IS_ERR(clk)) {
			clk_set_rate(clk, 0);
			clk_put(clk);
		}
		pnx4008_config_dma(-1, -1, 0);
	}
}

static DEFINE_SPINLOCK(dma_lock);

static inline void pnx4008_dma_lock(void)
{
	spin_lock_irq(&dma_lock);
}

static inline void pnx4008_dma_unlock(void)
{
	spin_unlock_irq(&dma_lock);
}

#define VALID_CHANNEL(c)	(((c) >= 0) && ((c) < MAX_DMA_CHANNELS))

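/*
 * Request a DMA channel.  Pass ch == -1 to take any free channel (channels
 * are scanned from the highest number down) or a specific channel number
 * to claim exactly that one.  Returns the channel number on success,
 * -EINVAL on bad parameters, or -ENODEV when no suitable channel is free.
 */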
int pnx4008_request_channel(char *name, int ch,
			    void (*irq_handler) (int, int, void *), void *data)
{
	int i, found = 0;

	/* basic sanity checks */
	if (!name || (ch != -1 && !VALID_CHANNEL(ch)))
		return -EINVAL;

	pnx4008_dma_lock();

	/* try grabbing a DMA channel with the requested priority */
	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		if (!dma_channels[i].name && (ch == -1 || ch == i)) {
			found = 1;
			break;
		}
	}

	if (found) {
		dma_increment_usage();
		dma_channels[i].name = name;
		dma_channels[i].irq_handler = irq_handler;
		dma_channels[i].data = data;
		dma_channels[i].ll = NULL;
		dma_channels[i].ll_dma = 0;
	} else {
		printk(KERN_WARNING "No more available DMA channels for %s\n",
		       name);
		i = -ENODEV;
	}

	pnx4008_dma_unlock();
	return i;
}

EXPORT_SYMBOL_GPL(pnx4008_request_channel);

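/*
 * Release a previously requested channel: free any linked-list chain still
 * attached to it and drop the usage count (which may gate the DMA clock
 * off again).
 */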
void pnx4008_free_channel(int ch)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name) {
		printk(KERN_CRIT
		       "%s: trying to free an invalid or already freed channel %d\n",
		       __FUNCTION__, ch);
		return;
	}

	pnx4008_dma_lock();
	pnx4008_free_ll(dma_channels[ch].ll_dma, dma_channels[ch].ll);
	dma_channels[ch].ll = NULL;
	dma_decrement_usage();

	dma_channels[ch].name = NULL;
	pnx4008_dma_unlock();
}

EXPORT_SYMBOL_GPL(pnx4008_free_channel);

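/*
 * Set the global DMAC configuration register: AHB master 1/2 endianness
 * (bits 1 and 2) and the controller enable bit (bit 0).  Pass 0 or 1 to
 * change a field; any other value (e.g. -1) leaves that field untouched.
 */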
int pnx4008_config_dma(int ahb_m1_be, int ahb_m2_be, int enable)
{
	unsigned long dma_cfg = __raw_readl(DMAC_CONFIG);

	switch (ahb_m1_be) {
	case 0:
		dma_cfg &= ~(1 << 1);
		break;
	case 1:
		dma_cfg |= (1 << 1);
		break;
	default:
		break;
	}

	switch (ahb_m2_be) {
	case 0:
		dma_cfg &= ~(1 << 2);
		break;
	case 1:
		dma_cfg |= (1 << 2);
		break;
	default:
		break;
	}

	switch (enable) {
	case 0:
		dma_cfg &= ~(1 << 0);
		break;
	case 1:
		dma_cfg |= (1 << 0);
		break;
	default:
		break;
	}

	pnx4008_dma_lock();
	__raw_writel(dma_cfg, DMAC_CONFIG);
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_dma);

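/*
 * Pack a pnx4008_dma_ch_ctrl description into a raw channel control word.
 * Bit layout as written below: 31 tc_mask, 30 cacheable, 29 bufferable,
 * 28 priv_mode, 27 di, 26 si, 25 dest_ahb1, 24 src_ahb1, 23:21 dwidth,
 * 20:18 swidth, 17:15 destination burst size, 14:12 source burst size,
 * 10:0 transfer size (tr_size, at most 0x7ff).
 */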
int pnx4008_dma_pack_control(const struct pnx4008_dma_ch_ctrl * ch_ctrl,
			     unsigned long *ctrl)
{
	int i = 0, dbsize, sbsize, err = 0;

	if (!ctrl || !ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	*ctrl = 0;

	switch (ch_ctrl->tc_mask) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 31);
		break;

	default:
		err = -EINVAL;
		goto out;
	}

	switch (ch_ctrl->cacheable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 30);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->bufferable) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 29);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->priv_mode) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 28);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->di) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 27);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->si) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 26);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->dest_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 25);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->src_ahb1) {
	case 0:
		break;
	case 1:
		*ctrl |= (1 << 24);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->dwidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 21);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (1 << 21);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 21);
		*ctrl |= (2 << 21);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_ctrl->swidth) {
	case WIDTH_BYTE:
		*ctrl &= ~(7 << 18);
		break;
	case WIDTH_HWORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (1 << 18);
		break;
	case WIDTH_WORD:
		*ctrl &= ~(7 << 18);
		*ctrl |= (2 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	/* encode the destination burst size: 1 or a power of two up to 256 */
	dbsize = ch_ctrl->dbsize;
	if (!dbsize) {
		err = -EINVAL;
		goto out;
	}
	while (!(dbsize & 1)) {
		i++;
		dbsize >>= 1;
	}
	if (ch_ctrl->dbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 15);
	*ctrl |= (i << 15);

	/* encode the source burst size the same way */
	i = 0;
	sbsize = ch_ctrl->sbsize;
	if (!sbsize) {
		err = -EINVAL;
		goto out;
	}
	while (!(sbsize & 1)) {
		i++;
		sbsize >>= 1;
	}
	if (ch_ctrl->sbsize != 1 << i || i > 8 || i == 1) {
		err = -EINVAL;
		goto out;
	} else if (i > 1)
		i--;
	*ctrl &= ~(7 << 12);
	*ctrl |= (i << 12);

	if (ch_ctrl->tr_size > 0x7ff) {
		err = -E2BIG;
		goto out;
	}
	*ctrl &= ~0x7ff;
	*ctrl |= ch_ctrl->tr_size & 0x7ff;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_control);

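/*
 * Inverse of pnx4008_dma_pack_control(): decode a raw channel control word
 * back into a pnx4008_dma_ch_ctrl structure.
 */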
int pnx4008_dma_parse_control(unsigned long ctrl,
			      struct pnx4008_dma_ch_ctrl * ch_ctrl)
{
	int err = 0;

	if (!ch_ctrl) {
		err = -EINVAL;
		goto out;
	}

	ch_ctrl->tr_size = ctrl & 0x7ff;
	ctrl >>= 12;

	ch_ctrl->sbsize = 1 << (ctrl & 7);
	if (ch_ctrl->sbsize > 1)
		ch_ctrl->sbsize <<= 1;
	ctrl >>= 3;

	ch_ctrl->dbsize = 1 << (ctrl & 7);
	if (ch_ctrl->dbsize > 1)
		ch_ctrl->dbsize <<= 1;
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->swidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->swidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->swidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	switch (ctrl & 7) {
	case 0:
		ch_ctrl->dwidth = WIDTH_BYTE;
		break;
	case 1:
		ch_ctrl->dwidth = WIDTH_HWORD;
		break;
	case 2:
		ch_ctrl->dwidth = WIDTH_WORD;
		break;
	default:
		err = -EINVAL;
		goto out;
	}
	ctrl >>= 3;

	ch_ctrl->src_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->dest_ahb1 = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->si = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->di = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->priv_mode = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->bufferable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->cacheable = ctrl & 1;
	ctrl >>= 1;

	ch_ctrl->tc_mask = ctrl & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_control);

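/*
 * Pack a pnx4008_dma_ch_config description into a raw channel config word.
 * Bit layout as written below: 18 halt, 17 active, 16 lock, 15 itc, 14 ie,
 * 13:11 flow control, 10:6 destination peripheral, 5:1 source peripheral.
 */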
int pnx4008_dma_pack_config(const struct pnx4008_dma_ch_config * ch_cfg,
			    unsigned long *cfg)
{
	int err = 0;

	if (!cfg || !ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	*cfg = 0;

	switch (ch_cfg->halt) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 18);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->active) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 17);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->lock) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 16);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->itc) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 15);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->ie) {
	case 0:
		break;
	case 1:
		*cfg |= (1 << 14);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	switch (ch_cfg->flow_cntrl) {
	case FC_MEM2MEM_DMA:
		*cfg &= ~(7 << 11);
		break;
	case FC_MEM2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (1 << 11);
		break;
	case FC_PER2MEM_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (2 << 11);
		break;
	case FC_PER2PER_DMA:
		*cfg &= ~(7 << 11);
		*cfg |= (3 << 11);
		break;
	case FC_PER2PER_DPER:
		*cfg &= ~(7 << 11);
		*cfg |= (4 << 11);
		break;
	case FC_MEM2PER_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (5 << 11);
		break;
	case FC_PER2MEM_PER:
		*cfg &= ~(7 << 11);
		*cfg |= (6 << 11);
		break;
	case FC_PER2PER_SPER:
		*cfg |= (7 << 11);
		break;

	default:
		err = -EINVAL;
		goto out;
	}
	*cfg &= ~(0x1f << 6);
	*cfg |= ((ch_cfg->dest_per & 0x1f) << 6);

	*cfg &= ~(0x1f << 1);
	*cfg |= ((ch_cfg->src_per & 0x1f) << 1);

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_pack_config);

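/*
 * Inverse of pnx4008_dma_pack_config(): decode a raw channel config word
 * back into a pnx4008_dma_ch_config structure.
 */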
int pnx4008_dma_parse_config(unsigned long cfg,
			     struct pnx4008_dma_ch_config * ch_cfg)
{
	int err = 0;

	if (!ch_cfg) {
		err = -EINVAL;
		goto out;
	}

	cfg >>= 1;

	ch_cfg->src_per = cfg & 0x1f;
	cfg >>= 5;

	ch_cfg->dest_per = cfg & 0x1f;
	cfg >>= 5;

	switch (cfg & 7) {
	case 0:
		ch_cfg->flow_cntrl = FC_MEM2MEM_DMA;
		break;
	case 1:
		ch_cfg->flow_cntrl = FC_MEM2PER_DMA;
		break;
	case 2:
		ch_cfg->flow_cntrl = FC_PER2MEM_DMA;
		break;
	case 3:
		ch_cfg->flow_cntrl = FC_PER2PER_DMA;
		break;
	case 4:
		ch_cfg->flow_cntrl = FC_PER2PER_DPER;
		break;
	case 5:
		ch_cfg->flow_cntrl = FC_MEM2PER_PER;
		break;
	case 6:
		ch_cfg->flow_cntrl = FC_PER2MEM_PER;
		break;
	case 7:
		ch_cfg->flow_cntrl = FC_PER2PER_SPER;
		break;
	}
	cfg >>= 3;

	ch_cfg->ie = cfg & 1;
	cfg >>= 1;

	ch_cfg->itc = cfg & 1;
	cfg >>= 1;

	ch_cfg->lock = cfg & 1;
	cfg >>= 1;

	ch_cfg->active = cfg & 1;
	cfg >>= 1;

	ch_cfg->halt = cfg & 1;

out:
	return err;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_parse_config);

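/*
 * If the transfer described by @config is larger than the 11-bit transfer
 * size field allows (0x7FF units), split it into a chain of linked-list
 * entries of roughly equal size.  Source/destination addresses of the
 * extra entries are advanced only when the corresponding increment flag
 * (si/di) is set, and the terminal-count interrupt bit is kept only on
 * the last entry of the chain.
 */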
void pnx4008_dma_split_head_entry(struct pnx4008_dma_config * config,
				  struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		config->ch_ctrl &= ~0x7ff;
		config->ch_ctrl |= new_len;
		if (!config->is_ll) {
			config->is_ll = 1;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = config->ll;
			unsigned long ll_dma_old = config->ll_dma;
			while (num_entries) {
				if (!ll) {
					config->ll =
					    pnx4008_alloc_ll_entry(&config->ll_dma);
					ll = config->ll;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    config->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = config->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    config->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = config->dest_addr;
				ll->ch_ctrl = config->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = config->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		config->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_head_entry);

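/*
 * Same splitting as pnx4008_dma_split_head_entry(), but applied to an
 * existing linked-list entry: any extra entries are inserted between
 * @cur_ll and its current successor.
 */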
void pnx4008_dma_split_ll_entry(struct pnx4008_dma_ll * cur_ll,
				struct pnx4008_dma_ch_ctrl * ctrl)
{
	int new_len = ctrl->tr_size, num_entries = 0;
	int old_len = new_len;
	int src_width, dest_width, count = 1;

	switch (ctrl->swidth) {
	case WIDTH_BYTE:
		src_width = 1;
		break;
	case WIDTH_HWORD:
		src_width = 2;
		break;
	case WIDTH_WORD:
		src_width = 4;
		break;
	default:
		return;
	}

	switch (ctrl->dwidth) {
	case WIDTH_BYTE:
		dest_width = 1;
		break;
	case WIDTH_HWORD:
		dest_width = 2;
		break;
	case WIDTH_WORD:
		dest_width = 4;
		break;
	default:
		return;
	}

	while (new_len > 0x7FF) {
		num_entries++;
		new_len = (ctrl->tr_size + num_entries) / (num_entries + 1);
	}
	if (num_entries != 0) {
		struct pnx4008_dma_ll *ll = NULL;
		cur_ll->ch_ctrl &= ~0x7ff;
		cur_ll->ch_ctrl |= new_len;
		if (!cur_ll->next) {
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}
		} else {
			struct pnx4008_dma_ll *ll_old = cur_ll->next;
			unsigned long ll_dma_old = cur_ll->next_dma;
			while (num_entries) {
				if (!ll) {
					cur_ll->next =
					    pnx4008_alloc_ll_entry(&cur_ll->next_dma);
					ll = cur_ll->next;
				} else {
					ll->next =
					    pnx4008_alloc_ll_entry(&ll->next_dma);
					ll = ll->next;
				}

				if (ctrl->si)
					ll->src_addr =
					    cur_ll->src_addr +
					    src_width * new_len * count;
				else
					ll->src_addr = cur_ll->src_addr;
				if (ctrl->di)
					ll->dest_addr =
					    cur_ll->dest_addr +
					    dest_width * new_len * count;
				else
					ll->dest_addr = cur_ll->dest_addr;
				ll->ch_ctrl = cur_ll->ch_ctrl & 0x7fffffff;
				ll->next_dma = 0;
				ll->next = NULL;
				num_entries--;
				count++;
			}

			ll->next_dma = ll_dma_old;
			ll->next = ll_old;
		}
		/* adjust last length/tc */
		ll->ch_ctrl = cur_ll->ch_ctrl & (~0x7ff);
		ll->ch_ctrl |= old_len - new_len * (count - 1);
		cur_ll->ch_ctrl &= 0x7fffffff;
	}
}

EXPORT_SYMBOL_GPL(pnx4008_dma_split_ll_entry);

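/*
 * Program the per-channel registers (source, destination, linked-list
 * pointer, control and config words) from a pnx4008_dma_config.
 */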
int pnx4008_config_channel(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	__raw_writel(config->src_addr, DMAC_Cx_SRC_ADDR(ch));
	__raw_writel(config->dest_addr, DMAC_Cx_DEST_ADDR(ch));

	if (config->is_ll)
		__raw_writel(config->ll_dma, DMAC_Cx_LLI(ch));
	else
		__raw_writel(0, DMAC_Cx_LLI(ch));

	__raw_writel(config->ch_ctrl, DMAC_Cx_CONTROL(ch));
	__raw_writel(config->ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_config_channel);

int pnx4008_channel_get_config(int ch, struct pnx4008_dma_config * config)
{
	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name || !config)
		return -EINVAL;

	pnx4008_dma_lock();
	config->ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	config->ch_ctrl = __raw_readl(DMAC_Cx_CONTROL(ch));

	config->ll_dma = __raw_readl(DMAC_Cx_LLI(ch));
	config->is_ll = config->ll_dma ? 1 : 0;

	config->src_addr = __raw_readl(DMAC_Cx_SRC_ADDR(ch));
	config->dest_addr = __raw_readl(DMAC_Cx_DEST_ADDR(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_channel_get_config);

int pnx4008_dma_ch_enable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg |= 1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enable);

int pnx4008_dma_ch_disable(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	ch_cfg &= ~1;
	__raw_writel(ch_cfg, DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return 0;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_disable);

int pnx4008_dma_ch_enabled(int ch)
{
	unsigned long ch_cfg;

	if (!VALID_CHANNEL(ch) || !dma_channels[ch].name)
		return -EINVAL;

	pnx4008_dma_lock();
	ch_cfg = __raw_readl(DMAC_Cx_CONFIG(ch));
	pnx4008_dma_unlock();

	return ch_cfg & 1;
}

EXPORT_SYMBOL_GPL(pnx4008_dma_ch_enabled);

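/*
 * Single DMA interrupt entry point: read the raw, terminal-count and error
 * status registers, dispatch to the handler registered for each signalling
 * channel with DMA_TC_INT and/or DMA_ERR_INT as the cause, then clear the
 * serviced status bits.
 */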
static irqreturn_t dma_irq_handler(int irq, void *dev_id)
{
	int i;
	unsigned long dint = __raw_readl(DMAC_INT_STAT);
	unsigned long tcint = __raw_readl(DMAC_INT_TC_STAT);
	unsigned long eint = __raw_readl(DMAC_INT_ERR_STAT);
	unsigned long i_bit;

	for (i = MAX_DMA_CHANNELS - 1; i >= 0; i--) {
		i_bit = 1 << i;
		if (dint & i_bit) {
			struct dma_channel *channel = &dma_channels[i];

			if (channel->name && channel->irq_handler) {
				int cause = 0;

				if (eint & i_bit)
					cause |= DMA_ERR_INT;
				if (tcint & i_bit)
					cause |= DMA_TC_INT;
				channel->irq_handler(i, cause, channel->data);
			} else {
				/*
				 * IRQ for an unregistered DMA channel
				 */
				printk(KERN_WARNING
				       "spurious IRQ for DMA channel %d\n", i);
			}
			if (tcint & i_bit)
				__raw_writel(i_bit, DMAC_INT_TC_CLEAR);
			if (eint & i_bit)
				__raw_writel(i_bit, DMAC_INT_ERR_CLEAR);
		}
	}
	return IRQ_HANDLED;
}

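/*
 * Set up the DMA core: hook the DMA interrupt, carve a 16 KiB coherent
 * area into a free list of linked-list descriptors and enable the
 * controller.
 */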
static int __init pnx4008_dma_init(void)
{
	int ret, i;

	ret = request_irq(DMA_INT, dma_irq_handler, 0, "DMA", NULL);
	if (ret) {
		printk(KERN_CRIT "Wow!  Can't register IRQ for DMA\n");
		goto out;
	}

	ll_pool.count = 0x4000 / sizeof(struct pnx4008_dma_ll);
	ll_pool.cur = ll_pool.vaddr =
	    dma_alloc_coherent(NULL, ll_pool.count * sizeof(struct pnx4008_dma_ll),
			       &ll_pool.dma_addr, GFP_KERNEL);

	if (!ll_pool.vaddr) {
		ret = -ENOMEM;
		free_irq(DMA_INT, NULL);
		goto out;
	}

	for (i = 0; i < ll_pool.count - 1; i++) {
		void **addr = ll_pool.vaddr + i * sizeof(struct pnx4008_dma_ll);
		*addr = (void *)addr + sizeof(struct pnx4008_dma_ll);
	}
	*(long *)(ll_pool.vaddr +
		  (ll_pool.count - 1) * sizeof(struct pnx4008_dma_ll)) =
	    (long)ll_pool.vaddr;

	__raw_writel(1, DMAC_CONFIG);

out:
	return ret;
}
arch_initcall(pnx4008_dma_init);