#include <linux/module.h>
#include <linux/init.h>
#include <linux/sched.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/timer.h>
#include <linux/errno.h>
#include <linux/stat.h>
#include <linux/tty.h>
#include <linux/selection.h>
#include <linux/kmod.h>
#include <linux/vmalloc.h>
#include <linux/interrupt.h>
#include <linux/delay.h>
#include <asm/uaccess.h>

#include <asm/ifxmips/ifxmips.h>
#include <asm/ifxmips/ifxmips_irq.h>
#include <asm/ifxmips/ifxmips_dma.h>
#include <asm/ifxmips/ifxmips_pmu.h>
/* 25 descriptors per DMA channel: one 4096-byte page / 8 bytes per descriptor / 20 channels = 25.6, rounded down */
#define IFXMIPS_DMA_DESCRIPTOR_OFFSET	25

#define MAX_DMA_DEVICE_NUM	6	/* max ports connecting to dma */
#define MAX_DMA_CHANNEL_NUM	20	/* max dma channels */
#define DMA_INT_BUDGET		100	/* budget for interrupt handling */
#define DMA_POLL_COUNTER	4	/* fix me, set the correct counter value here! */
extern void mask_and_ack_ifxmips_irq(unsigned int irq_nr);
extern void enable_ifxmips_irq(unsigned int irq_nr);
extern void disable_ifxmips_irq(unsigned int irq_nr);
_dma_device_info dma_devs[MAX_DMA_DEVICE_NUM];
_dma_channel_info dma_chan[MAX_DMA_CHANNEL_NUM];
u64 *g_desc_list;

char global_device_name[MAX_DMA_DEVICE_NUM][20] =
	{ {"PPE"}, {"DEU"}, {"SPI"}, {"SDIO"}, {"MCTRL0"}, {"MCTRL1"} };
_dma_chan_map default_dma_map[MAX_DMA_CHANNEL_NUM] = {
	{"PPE",    IFXMIPS_DMA_RX, 0, IFXMIPS_DMA_CH0_INT,  0},
	{"PPE",    IFXMIPS_DMA_TX, 0, IFXMIPS_DMA_CH1_INT,  0},
	{"PPE",    IFXMIPS_DMA_RX, 1, IFXMIPS_DMA_CH2_INT,  1},
	{"PPE",    IFXMIPS_DMA_TX, 1, IFXMIPS_DMA_CH3_INT,  1},
	{"PPE",    IFXMIPS_DMA_RX, 2, IFXMIPS_DMA_CH4_INT,  2},
	{"PPE",    IFXMIPS_DMA_TX, 2, IFXMIPS_DMA_CH5_INT,  2},
	{"PPE",    IFXMIPS_DMA_RX, 3, IFXMIPS_DMA_CH6_INT,  3},
	{"PPE",    IFXMIPS_DMA_TX, 3, IFXMIPS_DMA_CH7_INT,  3},
	{"DEU",    IFXMIPS_DMA_RX, 0, IFXMIPS_DMA_CH8_INT,  0},
	{"DEU",    IFXMIPS_DMA_TX, 0, IFXMIPS_DMA_CH9_INT,  0},
	{"DEU",    IFXMIPS_DMA_RX, 1, IFXMIPS_DMA_CH10_INT, 1},
	{"DEU",    IFXMIPS_DMA_TX, 1, IFXMIPS_DMA_CH11_INT, 1},
	{"SPI",    IFXMIPS_DMA_RX, 0, IFXMIPS_DMA_CH12_INT, 0},
	{"SPI",    IFXMIPS_DMA_TX, 0, IFXMIPS_DMA_CH13_INT, 0},
	{"SDIO",   IFXMIPS_DMA_RX, 0, IFXMIPS_DMA_CH14_INT, 0},
	{"SDIO",   IFXMIPS_DMA_TX, 0, IFXMIPS_DMA_CH15_INT, 0},
	{"MCTRL0", IFXMIPS_DMA_RX, 0, IFXMIPS_DMA_CH16_INT, 0},
	{"MCTRL0", IFXMIPS_DMA_TX, 0, IFXMIPS_DMA_CH17_INT, 0},
	{"MCTRL1", IFXMIPS_DMA_RX, 1, IFXMIPS_DMA_CH18_INT, 1},
	{"MCTRL1", IFXMIPS_DMA_TX, 1, IFXMIPS_DMA_CH19_INT, 1}
};
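/*
 * Note on the initializer above (inferred from how map[] is consumed in
 * map_dma_chan() below, not stated explicitly in the original source): each
 * entry appears to fill {dev_name, dir, rel_chan_no, irq, pri}, i.e. the
 * peripheral name, the transfer direction, the peripheral-relative channel
 * number, the channel's interrupt line, and its 'pri' value.
 */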
_dma_chan_map *chan_map = default_dma_map;

volatile u32 g_ifxmips_dma_int_status = 0;
volatile int g_ifxmips_dma_in_process = 0;	/* 0 = not in process, 1 = in process */
void do_dma_tasklet(unsigned long);
DECLARE_TASKLET(dma_tasklet, do_dma_tasklet, 0);
u8 *
common_buffer_alloc(int len, int *byte_offset, void **opt)
{
	u8 *buffer = kmalloc(len * sizeof(u8), GFP_KERNEL);

	*byte_offset = 0;

	return buffer;
}
void
common_buffer_free(u8 *dataptr, void *opt)
{
	if (dataptr)
		kfree(dataptr);
}
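/*
 * common_buffer_alloc()/common_buffer_free() are installed as the default
 * buffer_alloc/buffer_free callbacks for every device in map_dma_chan()
 * below. Since they are plain function pointers on _dma_device_info, a
 * client driver can presumably substitute its own allocator before calling
 * dma_device_register(); that is an inference from the structure, not
 * something the original source states.
 */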
void
enable_ch_irq(_dma_channel_info *pCh)
{
	int chan_no = (int)(pCh - dma_chan);
	unsigned long flag;

	local_irq_save(flag);
	writel(chan_no, IFXMIPS_DMA_CS);
	writel(0x4a, IFXMIPS_DMA_CIE);
	writel(readl(IFXMIPS_DMA_IRNEN) | (1 << chan_no), IFXMIPS_DMA_IRNEN);
	local_irq_restore(flag);

	enable_ifxmips_irq(pCh->irq);
}
void
disable_ch_irq(_dma_channel_info *pCh)
{
	int chan_no = (int)(pCh - dma_chan);
	unsigned long flag;

	local_irq_save(flag);
	g_ifxmips_dma_int_status &= ~(1 << chan_no);
	writel(chan_no, IFXMIPS_DMA_CS);
	writel(0, IFXMIPS_DMA_CIE);
	writel(readl(IFXMIPS_DMA_IRNEN) & ~(1 << chan_no), IFXMIPS_DMA_IRNEN);
	local_irq_restore(flag);

	mask_and_ack_ifxmips_irq(pCh->irq);
}
void
open_chan(_dma_channel_info *pCh)
{
	int chan_no = (int)(pCh - dma_chan);
	unsigned long flag;

	local_irq_save(flag);
	writel(chan_no, IFXMIPS_DMA_CS);
	writel(readl(IFXMIPS_DMA_CCTRL) | 1, IFXMIPS_DMA_CCTRL);
	if (pCh->dir == IFXMIPS_DMA_RX)
		enable_ch_irq(pCh);
	local_irq_restore(flag);
}
void
close_chan(_dma_channel_info *pCh)
{
	int chan_no = (int)(pCh - dma_chan);
	unsigned long flag;

	local_irq_save(flag);
	writel(chan_no, IFXMIPS_DMA_CS);
	writel(readl(IFXMIPS_DMA_CCTRL) & ~1, IFXMIPS_DMA_CCTRL);
	disable_ch_irq(pCh);
	local_irq_restore(flag);
}
void
reset_chan(_dma_channel_info *pCh)
{
	int chan_no = (int)(pCh - dma_chan);

	writel(chan_no, IFXMIPS_DMA_CS);
	writel(readl(IFXMIPS_DMA_CCTRL) | 2, IFXMIPS_DMA_CCTRL);
}
void
rx_chan_intr_handler(int chan_no)
{
	_dma_device_info *pDev = (_dma_device_info *)dma_chan[chan_no].dma_dev;
	_dma_channel_info *pCh = &dma_chan[chan_no];
	struct rx_desc *rx_desc_p;
	int tmp;
	unsigned long flag;

	/* handle command complete interrupt */
	rx_desc_p = (struct rx_desc *)pCh->desc_base + pCh->curr_desc;
	if (rx_desc_p->status.field.OWN == CPU_OWN
	    && rx_desc_p->status.field.C
	    && rx_desc_p->status.field.data_length < 1536) {
		/* everything is correct, so inform the upper layer */
		pDev->current_rx_chan = pCh->rel_chan_no;
		if (pDev->intr_handler)
			pDev->intr_handler(pDev, RCV_INT);
		pCh->weight--;
	} else {
		/* nothing usable: clear the channel interrupt status and re-enable the line */
		local_irq_save(flag);
		tmp = readl(IFXMIPS_DMA_CS);
		writel(chan_no, IFXMIPS_DMA_CS);
		writel(readl(IFXMIPS_DMA_CIS) | 0x7e, IFXMIPS_DMA_CIS);
		writel(tmp, IFXMIPS_DMA_CS);
		g_ifxmips_dma_int_status &= ~(1 << chan_no);
		local_irq_restore(flag);
		enable_ifxmips_irq(dma_chan[chan_no].irq);
	}
}
void
tx_chan_intr_handler(int chan_no)
{
	_dma_device_info *pDev = (_dma_device_info *)dma_chan[chan_no].dma_dev;
	_dma_channel_info *pCh = &dma_chan[chan_no];
	int tmp;
	unsigned long flag;

	local_irq_save(flag);
	tmp = readl(IFXMIPS_DMA_CS);
	writel(chan_no, IFXMIPS_DMA_CS);
	writel(readl(IFXMIPS_DMA_CIS) | 0x7e, IFXMIPS_DMA_CIS);
	writel(tmp, IFXMIPS_DMA_CS);
	g_ifxmips_dma_int_status &= ~(1 << chan_no);
	local_irq_restore(flag);

	pDev->current_tx_chan = pCh->rel_chan_no;
	if (pDev->intr_handler)
		pDev->intr_handler(pDev, TRANSMIT_CPT_INT);
}
void
do_dma_tasklet(unsigned long unused)
{
	int i;
	int chan_no = 0;
	int budget = DMA_INT_BUDGET;
	int weight = 0;
	unsigned long flag;

	while (g_ifxmips_dma_int_status)
	{
		if (budget-- < 0)
		{
			tasklet_schedule(&dma_tasklet);
			return;
		}

		/* pick the pending channel with the largest remaining weight */
		chan_no = -1;
		weight = 0;
		for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
		{
			if ((g_ifxmips_dma_int_status & (1 << i)) && dma_chan[i].weight > 0)
			{
				if (dma_chan[i].weight > weight)
				{
					chan_no = i;
					weight = dma_chan[chan_no].weight;
				}
			}
		}

		if (chan_no >= 0)
		{
			if (chan_map[chan_no].dir == IFXMIPS_DMA_RX)
				rx_chan_intr_handler(chan_no);
			else
				tx_chan_intr_handler(chan_no);
		} else {
			/* all pending channels have used up their weight: refill */
			for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
				dma_chan[i].weight = dma_chan[i].default_weight;
		}
	}

	local_irq_save(flag);
	g_ifxmips_dma_in_process = 0;
	if (g_ifxmips_dma_int_status)
	{
		g_ifxmips_dma_in_process = 1;
		tasklet_schedule(&dma_tasklet);
	}
	local_irq_restore(flag);
}
irqreturn_t
dma_interrupt(int irq, void *dev_id)
{
	_dma_channel_info *pCh;
	int chan_no = 0;
	int tmp;

	pCh = (_dma_channel_info *)dev_id;
	chan_no = (int)(pCh - dma_chan);
	if (chan_no < 0 || chan_no > 19)
		BUG();

	/* record the pending channel while all DMA interrupts are briefly masked */
	tmp = readl(IFXMIPS_DMA_IRNEN);
	writel(0, IFXMIPS_DMA_IRNEN);
	g_ifxmips_dma_int_status |= 1 << chan_no;
	writel(tmp, IFXMIPS_DMA_IRNEN);
	mask_and_ack_ifxmips_irq(irq);

	if (!g_ifxmips_dma_in_process)
	{
		/* kick the tasklet exactly once; it re-arms itself if work remains */
		g_ifxmips_dma_in_process = 1;
		tasklet_schedule(&dma_tasklet);
	}

	return IRQ_HANDLED;
}
struct dma_device_info *
dma_device_reserve(char *dev_name)
{
	int i;

	for (i = 0; i < MAX_DMA_DEVICE_NUM; i++)
	{
		if (strcmp(dev_name, dma_devs[i].device_name) == 0)
		{
			if (dma_devs[i].reserved)
				return NULL;
			dma_devs[i].reserved = 1;
			return &dma_devs[i];
		}
	}

	return NULL;
}
void
dma_device_release(_dma_device_info *dev)
{
	dev->reserved = 0;
}
void
dma_device_register(_dma_device_info *dev)
{
	int i, j;
	int chan_no = 0;
	u8 *buffer;
	int byte_offset;
	unsigned long flag;
	_dma_device_info *pDev;
	_dma_channel_info *pCh;
	struct rx_desc *rx_desc_p;
	struct tx_desc *tx_desc_p;

	for (i = 0; i < dev->max_tx_chan_num; i++)
	{
		pCh = dev->tx_chan[i];
		if (pCh->control == IFXMIPS_DMA_CH_ON)
		{
			chan_no = (int)(pCh - dma_chan);

			for (j = 0; j < pCh->desc_len; j++)
			{
				tx_desc_p = (struct tx_desc *)pCh->desc_base + j;
				memset(tx_desc_p, 0, sizeof(struct tx_desc));
			}

			local_irq_save(flag);
			writel(chan_no, IFXMIPS_DMA_CS);
			/* check if the descriptor length has changed */
			if (readl(IFXMIPS_DMA_CDLEN) != pCh->desc_len)
				writel(pCh->desc_len, IFXMIPS_DMA_CDLEN);

			writel(readl(IFXMIPS_DMA_CCTRL) & ~1, IFXMIPS_DMA_CCTRL);
			writel(readl(IFXMIPS_DMA_CCTRL) | 2, IFXMIPS_DMA_CCTRL);
			while (readl(IFXMIPS_DMA_CCTRL) & 2) {};
			writel(readl(IFXMIPS_DMA_IRNEN) | (1 << chan_no), IFXMIPS_DMA_IRNEN);
			writel(0x30100, IFXMIPS_DMA_CCTRL);	/* reset and enable channel, enable channel later */
			local_irq_restore(flag);
		}
	}

	for (i = 0; i < dev->max_rx_chan_num; i++)
	{
		pCh = dev->rx_chan[i];
		if (pCh->control == IFXMIPS_DMA_CH_ON)
		{
			chan_no = (int)(pCh - dma_chan);

			for (j = 0; j < pCh->desc_len; j++)
			{
				rx_desc_p = (struct rx_desc *)pCh->desc_base + j;
				pDev = (_dma_device_info *)(pCh->dma_dev);
				buffer = pDev->buffer_alloc(pCh->packet_size, &byte_offset, (void *)&(pCh->opt[j]));
				if (!buffer)
					break;

				dma_cache_inv((unsigned long)buffer, pCh->packet_size);

				rx_desc_p->Data_Pointer = (u32)CPHYSADDR((u32)buffer);
				rx_desc_p->status.word = 0;
				rx_desc_p->status.field.byte_offset = byte_offset;
				rx_desc_p->status.field.OWN = DMA_OWN;
				rx_desc_p->status.field.data_length = pCh->packet_size;
			}

			local_irq_save(flag);
			writel(chan_no, IFXMIPS_DMA_CS);
			/* check if the descriptor length has changed */
			if (readl(IFXMIPS_DMA_CDLEN) != pCh->desc_len)
				writel(pCh->desc_len, IFXMIPS_DMA_CDLEN);

			writel(readl(IFXMIPS_DMA_CCTRL) & ~1, IFXMIPS_DMA_CCTRL);
			writel(readl(IFXMIPS_DMA_CCTRL) | 2, IFXMIPS_DMA_CCTRL);
			while (readl(IFXMIPS_DMA_CCTRL) & 2) {};
			writel(0x0a, IFXMIPS_DMA_CIE);	/* fix me, should enable all the interrupts here? */
			writel(readl(IFXMIPS_DMA_IRNEN) | (1 << chan_no), IFXMIPS_DMA_IRNEN);
			writel(0x30000, IFXMIPS_DMA_CCTRL);
			local_irq_restore(flag);

			enable_ifxmips_irq(dma_chan[chan_no].irq);
		}
	}
}
void
dma_device_unregister(_dma_device_info *dev)
{
	int i, j;
	int chan_no;
	unsigned long flag;
	_dma_channel_info *pCh;
	struct rx_desc *rx_desc_p;
	struct tx_desc *tx_desc_p;

	for (i = 0; i < dev->max_tx_chan_num; i++)
	{
		pCh = dev->tx_chan[i];
		if (pCh->control == IFXMIPS_DMA_CH_ON)
		{
			chan_no = (int)(dev->tx_chan[i] - dma_chan);

			local_irq_save(flag);
			writel(chan_no, IFXMIPS_DMA_CS);

			pCh->control = IFXMIPS_DMA_CH_OFF;
			writel(0, IFXMIPS_DMA_CIE);	/* fix me, should disable all the interrupts here? */
			writel(readl(IFXMIPS_DMA_IRNEN) & ~(1 << chan_no), IFXMIPS_DMA_IRNEN);	/* disable interrupts */
			writel(readl(IFXMIPS_DMA_CCTRL) & ~1, IFXMIPS_DMA_CCTRL);
			while (readl(IFXMIPS_DMA_CCTRL) & 1) {};
			local_irq_restore(flag);

			for (j = 0; j < pCh->desc_len; j++)
			{
				tx_desc_p = (struct tx_desc *)pCh->desc_base + j;
				if ((tx_desc_p->status.field.OWN == CPU_OWN && tx_desc_p->status.field.C)
				    || (tx_desc_p->status.field.OWN == DMA_OWN && tx_desc_p->status.field.data_length > 0))
				{
					dev->buffer_free((u8 *)__va(tx_desc_p->Data_Pointer), (void *)pCh->opt[j]);
				}
				tx_desc_p->status.field.OWN = CPU_OWN;
				memset(tx_desc_p, 0, sizeof(struct tx_desc));
			}
			//TODO: should free buffers that were not transferred by the dma
		}
	}

	for (i = 0; i < dev->max_rx_chan_num; i++)
	{
		pCh = dev->rx_chan[i];
		chan_no = (int)(dev->rx_chan[i] - dma_chan);
		disable_ifxmips_irq(pCh->irq);

		local_irq_save(flag);
		g_ifxmips_dma_int_status &= ~(1 << chan_no);

		pCh->control = IFXMIPS_DMA_CH_OFF;

		writel(chan_no, IFXMIPS_DMA_CS);
		writel(0, IFXMIPS_DMA_CIE);	/* fix me, should disable all the interrupts here? */
		writel(readl(IFXMIPS_DMA_IRNEN) & ~(1 << chan_no), IFXMIPS_DMA_IRNEN);	/* disable interrupts */
		writel(readl(IFXMIPS_DMA_CCTRL) & ~1, IFXMIPS_DMA_CCTRL);
		while (readl(IFXMIPS_DMA_CCTRL) & 1) {};

		local_irq_restore(flag);

		for (j = 0; j < pCh->desc_len; j++)
		{
			rx_desc_p = (struct rx_desc *)pCh->desc_base + j;
			if ((rx_desc_p->status.field.OWN == CPU_OWN
			     && rx_desc_p->status.field.C)
			    || (rx_desc_p->status.field.OWN == DMA_OWN
			        && rx_desc_p->status.field.data_length > 0))
			{
				dev->buffer_free((u8 *)__va(rx_desc_p->Data_Pointer),
						 (void *)pCh->opt[j]);
			}
		}
	}
}
int
dma_device_read(struct dma_device_info *dma_dev, u8 **dataptr, void **opt)
{
	u8 *buf;
	int len;
	int byte_offset = 0;
	void *p = NULL;
	_dma_channel_info *pCh = dma_dev->rx_chan[dma_dev->current_rx_chan];
	struct rx_desc *rx_desc_p;

	/* get the rx data first */
	rx_desc_p = (struct rx_desc *)pCh->desc_base + pCh->curr_desc;
	if (!(rx_desc_p->status.field.OWN == CPU_OWN && rx_desc_p->status.field.C))
		return 0;

	buf = (u8 *)__va(rx_desc_p->Data_Pointer);
	*(u32 *)dataptr = (u32)buf;
	len = rx_desc_p->status.field.data_length;

	if (opt)
		*(int *)opt = (int)pCh->opt[pCh->curr_desc];

	/* replace it with a newly allocated buffer */
	buf = dma_dev->buffer_alloc(pCh->packet_size, &byte_offset, &p);
	if (buf != NULL)
	{
		dma_cache_inv((unsigned long)buf, pCh->packet_size);
		pCh->opt[pCh->curr_desc] = p;

		rx_desc_p->Data_Pointer = (u32)CPHYSADDR((u32)buf);
		rx_desc_p->status.word = (DMA_OWN << 31) | ((byte_offset) << 23) | pCh->packet_size;
	} else {
		/* no replacement buffer available, drop this packet */
		*(u32 *)dataptr = 0;
		if (opt)
			*(int *)opt = 0;
		len = 0;
	}

	/* advance the curr_desc pointer */
	pCh->curr_desc++;
	if (pCh->curr_desc == pCh->desc_len)
		pCh->curr_desc = 0;

	return len;
}
int
dma_device_write(struct dma_device_info *dma_dev, u8 *dataptr, int len, void *opt)
{
	unsigned long flag;
	u32 tmp, byte_offset;
	_dma_channel_info *pCh;
	int chan_no;
	struct tx_desc *tx_desc_p;

	local_irq_save(flag);

	pCh = dma_dev->tx_chan[dma_dev->current_tx_chan];
	chan_no = (int)(pCh - (_dma_channel_info *)dma_chan);

	/* reclaim descriptors the DMA engine has already completed */
	tx_desc_p = (struct tx_desc *)pCh->desc_base + pCh->prev_desc;
	while (tx_desc_p->status.field.OWN == CPU_OWN && tx_desc_p->status.field.C)
	{
		dma_dev->buffer_free((u8 *)__va(tx_desc_p->Data_Pointer), pCh->opt[pCh->prev_desc]);
		memset(tx_desc_p, 0, sizeof(struct tx_desc));
		pCh->prev_desc = (pCh->prev_desc + 1) % (pCh->desc_len);
		tx_desc_p = (struct tx_desc *)pCh->desc_base + pCh->prev_desc;
	}

	tx_desc_p = (struct tx_desc *)pCh->desc_base + pCh->curr_desc;
	/* check whether this descriptor is available */
	if (tx_desc_p->status.field.OWN == DMA_OWN || tx_desc_p->status.field.C)
	{
		/* if not, tell the upper layer device */
		dma_dev->intr_handler(dma_dev, TX_BUF_FULL_INT);
		local_irq_restore(flag);
		printk(KERN_INFO "%s %d: failed to write!\n", __func__, __LINE__);

		return 0;
	}

	pCh->opt[pCh->curr_desc] = opt;

	/* byte offset: adjusts the starting address of the data buffer, should be a multiple of the burst length */
	byte_offset = ((u32)CPHYSADDR((u32)dataptr)) % ((dma_dev->tx_burst_len) * 4);
	dma_cache_wback((unsigned long)dataptr, len);

	tx_desc_p->Data_Pointer = (u32)CPHYSADDR((u32)dataptr) - byte_offset;
	tx_desc_p->status.word = (DMA_OWN << 31) | DMA_DESC_SOP_SET | DMA_DESC_EOP_SET | ((byte_offset) << 23) | len;

	pCh->curr_desc++;
	if (pCh->curr_desc == pCh->desc_len)
		pCh->curr_desc = 0;

	/* check whether the next descriptor is available */
	tx_desc_p = (struct tx_desc *)pCh->desc_base + pCh->curr_desc;
	if (tx_desc_p->status.field.OWN == DMA_OWN)
	{
		/* if not, tell the upper layer device */
		dma_dev->intr_handler(dma_dev, TX_BUF_FULL_INT);
	}

	writel(chan_no, IFXMIPS_DMA_CS);
	tmp = readl(IFXMIPS_DMA_CCTRL);

	if (!(tmp & 1))
		pCh->open(pCh);

	local_irq_restore(flag);

	return len;
}
int
map_dma_chan(_dma_chan_map *map)
{
	int i, j;
	int result;

	for (i = 0; i < MAX_DMA_DEVICE_NUM; i++)
		strcpy(dma_devs[i].device_name, global_device_name[i]);

	for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
	{
		dma_chan[i].irq = map[i].irq;
		result = request_irq(dma_chan[i].irq, dma_interrupt, SA_INTERRUPT, "dma-core", (void *)&dma_chan[i]);
		if (result)
		{
			printk("error, cannot get dma_irq!\n");
			free_irq(dma_chan[i].irq, (void *)&dma_interrupt);

			return -EFAULT;
		}
	}

	for (i = 0; i < MAX_DMA_DEVICE_NUM; i++)
	{
		dma_devs[i].num_tx_chan = 0;	/* default number of tx channels */
		dma_devs[i].num_rx_chan = 0;	/* default number of rx channels */
		dma_devs[i].max_rx_chan_num = 0;
		dma_devs[i].max_tx_chan_num = 0;
		dma_devs[i].buffer_alloc = &common_buffer_alloc;
		dma_devs[i].buffer_free = &common_buffer_free;
		dma_devs[i].intr_handler = NULL;
		dma_devs[i].tx_burst_len = 4;
		dma_devs[i].rx_burst_len = 4;

		if (i == 0)
		{
			writel(0, IFXMIPS_DMA_PS);
			writel(readl(IFXMIPS_DMA_PCTRL) | ((0xf << 8) | (1 << 6)), IFXMIPS_DMA_PCTRL);	/* enable dma drop */
		}

		if (i == 1)
		{
			writel(1, IFXMIPS_DMA_PS);
			writel(0x14, IFXMIPS_DMA_PCTRL);	/* deu port setting */
		}

		for (j = 0; j < MAX_DMA_CHANNEL_NUM; j++)
		{
			dma_chan[j].byte_offset = 0;
			dma_chan[j].open = &open_chan;
			dma_chan[j].close = &close_chan;
			dma_chan[j].reset = &reset_chan;
			dma_chan[j].enable_irq = &enable_ch_irq;
			dma_chan[j].disable_irq = &disable_ch_irq;
			dma_chan[j].rel_chan_no = map[j].rel_chan_no;
			dma_chan[j].control = IFXMIPS_DMA_CH_OFF;
			dma_chan[j].default_weight = IFXMIPS_DMA_CH_DEFAULT_WEIGHT;
			dma_chan[j].weight = dma_chan[j].default_weight;
			dma_chan[j].curr_desc = 0;
			dma_chan[j].prev_desc = 0;
		}

		for (j = 0; j < MAX_DMA_CHANNEL_NUM; j++)
		{
			if (strcmp(dma_devs[i].device_name, map[j].dev_name) == 0)
			{
				if (map[j].dir == IFXMIPS_DMA_RX)
				{
					dma_chan[j].dir = IFXMIPS_DMA_RX;
					dma_devs[i].max_rx_chan_num++;
					dma_devs[i].rx_chan[dma_devs[i].max_rx_chan_num - 1] = &dma_chan[j];
					dma_devs[i].rx_chan[dma_devs[i].max_rx_chan_num - 1]->pri = map[j].pri;
					dma_chan[j].dma_dev = (void *)&dma_devs[i];
				} else if (map[j].dir == IFXMIPS_DMA_TX)
				{
					dma_chan[j].dir = IFXMIPS_DMA_TX;
					dma_devs[i].max_tx_chan_num++;
					dma_devs[i].tx_chan[dma_devs[i].max_tx_chan_num - 1] = &dma_chan[j];
					dma_devs[i].tx_chan[dma_devs[i].max_tx_chan_num - 1]->pri = map[j].pri;
					dma_chan[j].dma_dev = (void *)&dma_devs[i];
				} else {
					printk("WRONG DMA MAP!\n");
				}
			}
		}
	}

	return 0;
}
void
dma_chip_init(void)
{
	int i;

	// enable DMA from PMU
	ifxmips_pmu_enable(IFXMIPS_PMU_PWDCR_DMA);

	writel(readl(IFXMIPS_DMA_CTRL) | 1, IFXMIPS_DMA_CTRL);

	// disable all interrupts
	writel(0, IFXMIPS_DMA_IRNEN);

	for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
	{
		writel(i, IFXMIPS_DMA_CS);
		writel(0x2, IFXMIPS_DMA_CCTRL);
		writel(0x80000040, IFXMIPS_DMA_CPOLL);
		writel(readl(IFXMIPS_DMA_CCTRL) & ~0x1, IFXMIPS_DMA_CCTRL);
	}
}
int
ifxmips_dma_init(void)
{
	int i;

	dma_chip_init();

	if (map_dma_chan(default_dma_map))
		BUG();

	g_desc_list = (u64 *)KSEG1ADDR(__get_free_page(GFP_DMA));

	if (g_desc_list == NULL)
	{
		printk("no memory for descriptor\n");
		return -ENOMEM;
	}

	memset(g_desc_list, 0, PAGE_SIZE);

	for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
	{
		dma_chan[i].desc_base = (u32)g_desc_list + i * IFXMIPS_DMA_DESCRIPTOR_OFFSET * 8;
		dma_chan[i].curr_desc = 0;
		dma_chan[i].desc_len = IFXMIPS_DMA_DESCRIPTOR_OFFSET;

		writel(i, IFXMIPS_DMA_CS);
		writel((u32)CPHYSADDR(dma_chan[i].desc_base), IFXMIPS_DMA_CDBA);
		writel(dma_chan[i].desc_len, IFXMIPS_DMA_CDLEN);
	}

	return 0;
}

arch_initcall(ifxmips_dma_init);
void
dma_cleanup(void)
{
	int i;

	free_page(KSEG0ADDR((unsigned long)g_desc_list));

	for (i = 0; i < MAX_DMA_CHANNEL_NUM; i++)
		free_irq(dma_chan[i].irq, (void *)&dma_interrupt);
}
EXPORT_SYMBOL(dma_device_reserve);
EXPORT_SYMBOL(dma_device_release);
EXPORT_SYMBOL(dma_device_register);
EXPORT_SYMBOL(dma_device_unregister);
EXPORT_SYMBOL(dma_device_read);
EXPORT_SYMBOL(dma_device_write);
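/*
 * Hedged usage sketch (not part of the driver): roughly how a client of the
 * exported API above would be expected to attach to a port, under the
 * assumption that "PPE" is the port name being claimed and that
 * my_dma_intr_handler, my_alloc and my_free are hypothetical callbacks
 * supplied by that client. Kept inside #if 0 so it is illustration only.
 */
#if 0
static struct dma_device_info *my_dma_dev;

static int my_dma_intr_handler(struct dma_device_info *dma_dev, int status)
{
	u8 *buf;
	int len;
	void *opt;

	switch (status) {
	case RCV_INT:
		/* pull the completed packet out of the current rx channel */
		len = dma_device_read(dma_dev, &buf, &opt);
		/* ... hand (buf, len) to the upper layer, then release it ... */
		break;
	case TRANSMIT_CPT_INT:
		/* a tx descriptor completed; dma_device_write() reclaims it on the next call */
		break;
	case TX_BUF_FULL_INT:
		/* stop queueing until the next TRANSMIT_CPT_INT */
		break;
	}
	return 0;
}

static int my_attach(void)
{
	my_dma_dev = dma_device_reserve("PPE");
	if (!my_dma_dev)
		return -EBUSY;

	my_dma_dev->intr_handler = my_dma_intr_handler;
	/* optionally override the default kmalloc-based buffer callbacks */
	my_dma_dev->buffer_alloc = my_alloc;
	my_dma_dev->buffer_free = my_free;

	/* switch on the channels that should be programmed by dma_device_register() */
	my_dma_dev->rx_chan[0]->packet_size = 1536;
	my_dma_dev->rx_chan[0]->control = IFXMIPS_DMA_CH_ON;
	my_dma_dev->tx_chan[0]->control = IFXMIPS_DMA_CH_ON;

	dma_device_register(my_dma_dev);
	return 0;
}
#endif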
MODULE_LICENSE("GPL");