Blackfin arch: remove unused local define
/*
 * File:         arch/blackfin/kernel/bfin_dma_5xx.c
 * Based on:
 * Author:
 *
 * Created:
 * Description:  This file contains the simple DMA Implementation for Blackfin
 *
 * Modified:
 *               Copyright 2004-2006 Analog Devices Inc.
 *
 * Bugs:         Enter bugs at http://blackfin.uclinux.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see the file COPYING, or write
 * to the Free Software Foundation, Inc.,
 * 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include <linux/errno.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/param.h>

#include <asm/blackfin.h>
#include <asm/dma.h>
#include <asm/cacheflush.h>

/**************************************************************************
 * Global Variables
***************************************************************************/

static struct dma_channel dma_ch[MAX_BLACKFIN_DMA_CHANNEL];

/*------------------------------------------------------------------------------
 *       Set the Buffer Clear bit in the Configuration register of a specific
 *       DMA channel. This will stop the descriptor-based DMA operation.
 *-----------------------------------------------------------------------------*/
static void clear_dma_buffer(unsigned int channel)
{
        dma_ch[channel].regs->cfg |= RESTART;
        SSYNC();
        dma_ch[channel].regs->cfg &= ~RESTART;
        SSYNC();
}

static int __init blackfin_dma_init(void)
{
        int i;

        printk(KERN_INFO "Blackfin DMA Controller\n");

        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
                dma_ch[i].chan_status = DMA_CHANNEL_FREE;
                dma_ch[i].regs = dma_io_base_addr[i];
                mutex_init(&(dma_ch[i].dmalock));
        }
        /* Mark MEMDMA Channel 0 as requested since we're using it internally */
        dma_ch[CH_MEM_STREAM0_DEST].chan_status = DMA_CHANNEL_REQUESTED;
        dma_ch[CH_MEM_STREAM0_SRC].chan_status = DMA_CHANNEL_REQUESTED;

#if defined(CONFIG_DEB_DMA_URGENT)
        bfin_write_EBIU_DDRQUE(bfin_read_EBIU_DDRQUE()
                         | DEB1_URGENT | DEB2_URGENT | DEB3_URGENT);
#endif
        return 0;
}

arch_initcall(blackfin_dma_init);

/*------------------------------------------------------------------------------
 *      Request the specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
int request_dma(unsigned int channel, char *device_id)
{
        pr_debug("request_dma() : BEGIN \n");

#if defined(CONFIG_BF561) && ANOMALY_05000182
        if (channel >= CH_IMEM_STREAM0_DEST && channel <= CH_IMEM_STREAM1_DEST) {
                if (get_cclk() > 500000000) {
                        printk(KERN_WARNING
                               "Request IMDMA failed due to ANOMALY 05000182\n");
                        return -EFAULT;
                }
        }
#endif

        mutex_lock(&(dma_ch[channel].dmalock));

        if ((dma_ch[channel].chan_status == DMA_CHANNEL_REQUESTED)
            || (dma_ch[channel].chan_status == DMA_CHANNEL_ENABLED)) {
                mutex_unlock(&(dma_ch[channel].dmalock));
                pr_debug("DMA CHANNEL IN USE  \n");
                return -EBUSY;
        } else {
                dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
                pr_debug("DMA CHANNEL IS ALLOCATED  \n");
        }

        mutex_unlock(&(dma_ch[channel].dmalock));

#ifdef CONFIG_BF54x
        if (channel >= CH_UART2_RX && channel <= CH_UART3_TX) {
                unsigned int per_map;
                per_map = dma_ch[channel].regs->peripheral_map & 0xFFF;
                if (strncmp(device_id, "BFIN_UART", 9) == 0)
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0xC) << 12);
                else
                        dma_ch[channel].regs->peripheral_map = per_map |
                                ((channel - CH_UART2_RX + 0x6) << 12);
        }
#endif

        dma_ch[channel].device_id = device_id;
        dma_ch[channel].irq_callback = NULL;

        /* This is to be enforced by a restriction: you have to request the
         * DMA channel before doing any operations on the descriptor/channel.
         */
        pr_debug("request_dma() : END  \n");
        return channel;
}
EXPORT_SYMBOL(request_dma);

int set_dma_callback(unsigned int channel, dma_interrupt_t callback, void *data)
{
        int ret_irq = 0;

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        if (callback != NULL) {
                int ret_val;
                ret_irq = channel2irq(channel);

                dma_ch[channel].data = data;

                ret_val =
                    request_irq(ret_irq, (void *)callback, IRQF_DISABLED,
                                dma_ch[channel].device_id, data);
                if (ret_val) {
                        printk(KERN_NOTICE
                               "Request irq in DMA engine failed.\n");
                        return -EPERM;
                }
                dma_ch[channel].irq_callback = callback;
        }
        return 0;
}
EXPORT_SYMBOL(set_dma_callback);

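/*
 * Illustrative sketch only (not part of this driver): a typical peripheral
 * driver claims its channel, installs a completion handler, and later
 * releases the channel with free_dma().  The channel number, handler and
 * "dev" cookie below are placeholders, not definitions from this file.
 *
 *	if (request_dma(CH_SPORT0_RX, "MY_DRIVER") < 0)
 *		return -EBUSY;
 *	if (set_dma_callback(CH_SPORT0_RX, my_dma_handler, dev))
 *		goto err_free;
 *	...
 *	free_dma(CH_SPORT0_RX);
 */
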
void free_dma(unsigned int channel)
{
        int ret_irq;

        pr_debug("freedma() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        /* Halt the DMA */
        disable_dma(channel);
        clear_dma_buffer(channel);

        if (dma_ch[channel].irq_callback != NULL) {
                ret_irq = channel2irq(channel);
                free_irq(ret_irq, dma_ch[channel].data);
        }

        /* Clear the DMA Variable in the Channel */
        mutex_lock(&(dma_ch[channel].dmalock));
        dma_ch[channel].chan_status = DMA_CHANNEL_FREE;
        mutex_unlock(&(dma_ch[channel].dmalock));

        pr_debug("freedma() : END \n");
}
EXPORT_SYMBOL(free_dma);

void dma_enable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_enable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        enable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_enable_irq);

void dma_disable_irq(unsigned int channel)
{
        int ret_irq;

        pr_debug("dma_disable_irq() : BEGIN \n");
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        ret_irq = channel2irq(channel);
        disable_irq(ret_irq);
}
EXPORT_SYMBOL(dma_disable_irq);

int dma_channel_active(unsigned int channel)
{
        if (dma_ch[channel].chan_status == DMA_CHANNEL_FREE) {
                return 0;
        } else {
                return 1;
        }
}
EXPORT_SYMBOL(dma_channel_active);

/*------------------------------------------------------------------------------
 *      Stop the specific DMA channel.
 *-----------------------------------------------------------------------------*/
void disable_dma(unsigned int channel)
{
        pr_debug("stop_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg &= ~DMAEN;    /* Clear the enable bit */
        SSYNC();
        dma_ch[channel].chan_status = DMA_CHANNEL_REQUESTED;
        /* Needs to be enabled later */
        pr_debug("stop_dma() : END \n");
        return;
}
EXPORT_SYMBOL(disable_dma);

void enable_dma(unsigned int channel)
{
        pr_debug("enable_dma() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].chan_status = DMA_CHANNEL_ENABLED;
        dma_ch[channel].regs->curr_x_count = 0;
        dma_ch[channel].regs->curr_y_count = 0;

        dma_ch[channel].regs->cfg |= DMAEN;     /* Set the enable bit */
        SSYNC();
        pr_debug("enable_dma() : END \n");
        return;
}
EXPORT_SYMBOL(enable_dma);

/*------------------------------------------------------------------------------
 *              Set the Start Address register for the specific DMA channel.
 *              This function can be used for register-based DMA,
 *              to set up the start address.
 *              addr:           Starting address of the DMA data to be transferred.
 *-----------------------------------------------------------------------------*/
void set_dma_start_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_start_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->start_addr = addr;
        SSYNC();
        pr_debug("set_dma_start_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_start_addr);

void set_dma_next_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_next_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->next_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_next_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_next_desc_addr);

void set_dma_curr_desc_addr(unsigned int channel, unsigned long addr)
{
        pr_debug("set_dma_curr_desc_addr() : BEGIN \n");

        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_desc_ptr = addr;
        SSYNC();
        pr_debug("set_dma_curr_desc_addr() : END\n");
}
EXPORT_SYMBOL(set_dma_curr_desc_addr);

void set_dma_x_count(unsigned int channel, unsigned short x_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_count = x_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_count);

void set_dma_y_count(unsigned int channel, unsigned short y_count)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_count = y_count;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_count);

void set_dma_x_modify(unsigned int channel, short x_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->x_modify = x_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_x_modify);

void set_dma_y_modify(unsigned int channel, short y_modify)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->y_modify = y_modify;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_y_modify);

void set_dma_config(unsigned int channel, unsigned short config)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg = config;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_config);

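/*
 * Illustrative sketch only: a register-based (non-descriptor) transfer is
 * normally programmed with the helpers above and then started with
 * enable_dma().  "chan", "buf" and "count" are placeholders, and the mode
 * macros (DIR_READ, FLOW_STOP, INTR_DISABLE, DIMENSION_LINEAR,
 * DATA_SIZE_16, DMA_SYNC_RESTART) are assumed to come from <asm/dma.h>;
 * check that header for the exact names on a given part.
 *
 *	set_dma_start_addr(chan, (unsigned long)buf);
 *	set_dma_x_count(chan, count);
 *	set_dma_x_modify(chan, 2);		(stride of one 16-bit element)
 *	set_dma_config(chan,
 *		set_bfin_dma_config(DIR_READ, FLOW_STOP, INTR_DISABLE,
 *				    DIMENSION_LINEAR, DATA_SIZE_16,
 *				    DMA_SYNC_RESTART));
 *	enable_dma(chan);
 */
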
unsigned short
set_bfin_dma_config(char direction, char flow_mode,
                    char intr_mode, char dma_mode, char width, char syncmode)
{
        unsigned short config;

        config =
            ((direction << 1) | (width << 2) | (dma_mode << 4) |
             (intr_mode << 6) | (flow_mode << 12) | (syncmode << 5));
        return config;
}
EXPORT_SYMBOL(set_bfin_dma_config);

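/*
 * The shifts above correspond to the DMAx_CONFIG register fields: WNR at
 * bit 1, WDSIZE at bits 2-3, DMA2D at bit 4, SYNC at bit 5, the interrupt
 * mode at bits 6-7 and FLOW at bits 12-14 (DMAEN itself is set later by
 * enable_dma()).  Worked example with raw field values (illustrative):
 *
 *	set_bfin_dma_config(1, 0, 2, 0, 2, 0)
 *		= (1 << 1) | (2 << 2) | (2 << 6)
 *		= 0x008A	(write to memory, 32-bit words, interrupt on done)
 */
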
void set_dma_sg(unsigned int channel, struct dmasg *sg, int nr_sg)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->cfg |= ((nr_sg & 0x0F) << 8);

        dma_ch[channel].regs->next_desc_ptr = (unsigned int)sg;

        SSYNC();
}
EXPORT_SYMBOL(set_dma_sg);

void set_dma_curr_addr(unsigned int channel, unsigned long addr)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        dma_ch[channel].regs->curr_addr_ptr = addr;
        SSYNC();
}
EXPORT_SYMBOL(set_dma_curr_addr);

/*------------------------------------------------------------------------------
 *      Get the DMA status of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->irq_status;
}
EXPORT_SYMBOL(get_dma_curr_irqstat);

/*------------------------------------------------------------------------------
 *      Clear the DMA_DONE bit in DMA status. Stop the DMA completion interrupt.
 *-----------------------------------------------------------------------------*/
void clear_dma_irqstat(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));
        dma_ch[channel].regs->irq_status |= 3;
}
EXPORT_SYMBOL(clear_dma_irqstat);

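/*
 * Illustrative only: a caller that does not install an interrupt callback
 * can poll for completion and then acknowledge it, e.g.
 *
 *	while (!(get_dma_curr_irqstat(chan) & DMA_DONE))
 *		cpu_relax();
 *	clear_dma_irqstat(chan);
 */
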
/*------------------------------------------------------------------------------
 *      Get current DMA xcount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_xcount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_x_count;
}
EXPORT_SYMBOL(get_dma_curr_xcount);

/*------------------------------------------------------------------------------
 *      Get current DMA ycount of a specific DMA channel from the system.
 *-----------------------------------------------------------------------------*/
unsigned short get_dma_curr_ycount(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
               && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_y_count;
}
EXPORT_SYMBOL(get_dma_curr_ycount);

unsigned long get_dma_next_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->next_desc_ptr;
}
EXPORT_SYMBOL(get_dma_next_desc_ptr);

unsigned long get_dma_curr_desc_ptr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_desc_ptr;
}
EXPORT_SYMBOL(get_dma_curr_desc_ptr);

unsigned long get_dma_curr_addr(unsigned int channel)
{
        BUG_ON(!(dma_ch[channel].chan_status != DMA_CHANNEL_FREE
              && channel < MAX_BLACKFIN_DMA_CHANNEL));

        return dma_ch[channel].regs->curr_addr_ptr;
}
EXPORT_SYMBOL(get_dma_curr_addr);

#ifdef CONFIG_PM
int blackfin_dma_suspend(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++) {
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++) {
#endif
                if (dma_ch[i].chan_status == DMA_CHANNEL_ENABLED) {
                        printk(KERN_ERR "DMA Channel %d failed to suspend\n", i);
                        return -EBUSY;
                }

                dma_ch[i].saved_peripheral_map = dma_ch[i].regs->peripheral_map;
        }

        return 0;
}

void blackfin_dma_resume(void)
{
        int i;

#ifdef CONFIG_BF561     /* IMDMA channels don't have a PERIPHERAL_MAP */
        for (i = 0; i <= CH_MEM_STREAM3_SRC; i++)
#else
        for (i = 0; i < MAX_BLACKFIN_DMA_CHANNEL; i++)
#endif
                dma_ch[i].regs->peripheral_map = dma_ch[i].saved_peripheral_map;
}
#endif

static void *__dma_memcpy(void *dest, const void *src, size_t size)
{
        int direction;  /* 1 - address decrease, 0 - address increase */
        int flag_align; /* 1 - address aligned,  0 - address unaligned */
        int flag_2D;    /* 1 - 2D DMA needed,    0 - 1D DMA needed */
        unsigned long flags;

        if (size <= 0)
                return NULL;

        local_irq_save(flags);

        if ((unsigned long)src < memory_end)
                blackfin_dcache_flush_range((unsigned int)src,
                                            (unsigned int)(src + size));

        if ((unsigned long)dest < memory_end)
                blackfin_dcache_invalidate_range((unsigned int)dest,
                                                 (unsigned int)(dest + size));

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        if ((unsigned long)src < (unsigned long)dest)
                direction = 1;
        else
                direction = 0;

        if ((((unsigned long)dest % 2) == 0) && (((unsigned long)src % 2) == 0)
            && ((size % 2) == 0))
                flag_align = 1;
        else
                flag_align = 0;

        if (size > 0x10000)     /* size > 64K */
                flag_2D = 1;
        else
                flag_2D = 0;

        /* Setup destination and source start address */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 2);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 2);
                } else {
                        bfin_write_MDMA_D0_START_ADDR(dest + size - 1);
                        bfin_write_MDMA_S0_START_ADDR(src + size - 1);
                }
        } else {
                bfin_write_MDMA_D0_START_ADDR(dest);
                bfin_write_MDMA_S0_START_ADDR(src);
        }

        /* Setup destination and source xcount */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(1024 / 2);
                        bfin_write_MDMA_S0_X_COUNT(1024 / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(1024);
                        bfin_write_MDMA_S0_X_COUNT(1024);
                }
                bfin_write_MDMA_D0_Y_COUNT(size >> 10);
                bfin_write_MDMA_S0_Y_COUNT(size >> 10);
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_COUNT(size / 2);
                        bfin_write_MDMA_S0_X_COUNT(size / 2);
                } else {
                        bfin_write_MDMA_D0_X_COUNT(size);
                        bfin_write_MDMA_S0_X_COUNT(size);
                }
        }

        /* Setup destination and source xmodify and ymodify */
        if (direction) {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(-2);
                        bfin_write_MDMA_S0_X_MODIFY(-2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-2);
                                bfin_write_MDMA_S0_Y_MODIFY(-2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(-1);
                        bfin_write_MDMA_S0_X_MODIFY(-1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(-1);
                                bfin_write_MDMA_S0_Y_MODIFY(-1);
                        }
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_D0_X_MODIFY(2);
                        bfin_write_MDMA_S0_X_MODIFY(2);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(2);
                                bfin_write_MDMA_S0_Y_MODIFY(2);
                        }
                } else {
                        bfin_write_MDMA_D0_X_MODIFY(1);
                        bfin_write_MDMA_S0_X_MODIFY(1);
                        if (flag_2D) {
                                bfin_write_MDMA_D0_Y_MODIFY(1);
                                bfin_write_MDMA_S0_Y_MODIFY(1);
                        }
                }
        }

        /* Enable source DMA */
        if (flag_2D) {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | DMA2D);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | DMA2D);
                }
        } else {
                if (flag_align) {
                        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);
                } else {
                        bfin_write_MDMA_S0_CONFIG(DMAEN);
                        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN);
                }
        }

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(bfin_read_MDMA_D0_IRQ_STATUS() |
                                      (DMA_DONE | DMA_ERR));

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);

        local_irq_restore(flags);

        return dest;
}

void *dma_memcpy(void *dest, const void *src, size_t size)
{
        size_t bulk;
        size_t rest;
        void *addr;

        bulk = (size >> 16) << 16;
        rest = size - bulk;
        if (bulk)
                __dma_memcpy(dest, src, bulk);
        addr = __dma_memcpy(dest + bulk, src + bulk, rest);
        return addr;
}
EXPORT_SYMBOL(dma_memcpy);

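/*
 * The bulk/rest split above is plain arithmetic on the size: for example
 * size = 0x23456 is divided into bulk = 0x20000 and rest = 0x3456, and
 * each part is handed to __dma_memcpy() as a separate transfer.
 * Illustrative use ("dst", "src" and "nbytes" are placeholders):
 *
 *	dma_memcpy(dst, src, nbytes);
 */
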
void *safe_dma_memcpy(void *dest, const void *src, size_t size)
{
        void *addr;
        addr = dma_memcpy(dest, src, size);
        return addr;
}
EXPORT_SYMBOL(safe_dma_memcpy);

void dma_outsb(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(1);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsb);

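/*
 * dma_outsb() above and the dma_insb()/dma_outsw()/dma_insw()/dma_outsl()/
 * dma_insl() variants below all follow the same pattern: the peripheral
 * register keeps a fixed address (X_MODIFY of 0) while the memory side
 * strides by the element size (1, 2 or 4 bytes).  Hypothetical caller
 * pushing len bytes from tx_buf to a FIFO at fifo_addr:
 *
 *	dma_outsb(fifo_addr, tx_buf, len);
 */
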
void dma_insb(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len);

        local_irq_save(flags);
        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(1);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_8);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_8);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insb);

void dma_outsw(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(2);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsw);

void dma_insw(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(short));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(2);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_16);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_16);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insw);

void dma_outsl(unsigned long addr, const void *buf, unsigned short len)
{
        unsigned long flags;

        local_irq_save(flags);

        blackfin_dcache_flush_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        bfin_write_MDMA_D0_START_ADDR(addr);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(0);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(buf);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(4);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_outsl);

void dma_insl(unsigned long addr, void *buf, unsigned short len)
{
        unsigned long flags;

        blackfin_dcache_invalidate_range((unsigned int)buf,
                         (unsigned int)(buf) + len * sizeof(long));

        local_irq_save(flags);

        bfin_write_MDMA_D0_START_ADDR(buf);
        bfin_write_MDMA_D0_X_COUNT(len);
        bfin_write_MDMA_D0_X_MODIFY(4);
        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_START_ADDR(addr);
        bfin_write_MDMA_S0_X_COUNT(len);
        bfin_write_MDMA_S0_X_MODIFY(0);
        bfin_write_MDMA_S0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(DMAEN | WDSIZE_32);
        bfin_write_MDMA_D0_CONFIG(WNR | DI_EN | DMAEN | WDSIZE_32);

        SSYNC();

        while (!(bfin_read_MDMA_D0_IRQ_STATUS() & DMA_DONE))
                ;

        bfin_write_MDMA_D0_IRQ_STATUS(DMA_DONE | DMA_ERR);

        bfin_write_MDMA_S0_CONFIG(0);
        bfin_write_MDMA_D0_CONFIG(0);
        local_irq_restore(flags);
}
EXPORT_SYMBOL(dma_insl);