/*
 * include/asm-ppc/ppc4xx_dma.h
 *
 * IBM PPC4xx DMA engine library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up a bit more, Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#ifdef __KERNEL__
#ifndef __ASMPPC_PPC4xx_DMA_H
#define __ASMPPC_PPC4xx_DMA_H

#include <linux/config.h>
#include <linux/types.h>
#include <asm/mmu.h>
#include <asm/ibm4xx.h>

#undef DEBUG_4xxDMA

#define MAX_PPC4xx_DMA_CHANNELS 4

/* in arch/ppc/kernel/setup.c -- Cort */
extern unsigned long DMA_MODE_WRITE, DMA_MODE_READ;

/*
 * Function return status codes
 * These values indicate whether the function call succeeded or whether a
 * bad/invalid parameter was passed.
 */
#define DMA_STATUS_GOOD 0
#define DMA_STATUS_BAD_CHANNEL 1
#define DMA_STATUS_BAD_HANDLE 2
#define DMA_STATUS_BAD_MODE 3
#define DMA_STATUS_NULL_POINTER 4
#define DMA_STATUS_OUT_OF_MEMORY 5
#define DMA_STATUS_SGL_LIST_EMPTY 6
#define DMA_STATUS_GENERAL_ERROR 7
#define DMA_STATUS_CHANNEL_NOTFREE 8

#define DMA_CHANNEL_BUSY 0x80000000

/*
 * These indicate status as returned from the DMA Status Register.
 */
#define DMA_STATUS_NO_ERROR 0
#define DMA_STATUS_CS 1 /* Count Status */
#define DMA_STATUS_TS 2 /* Transfer Status */
#define DMA_STATUS_DMA_ERROR 3 /* DMA Error Occurred */
#define DMA_STATUS_DMA_BUSY 4 /* The channel is busy */


/*
 * DMA Channel Control Registers
 */

#ifdef CONFIG_44x
#define PPC4xx_DMA_64BIT
#define DMA_CR_OFFSET 1
#else
#define DMA_CR_OFFSET 0
#endif
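
/*
 * Worked example (illustration only): every field macro below that
 * subtracts DMA_CR_OFFSET lands one bit lower on the 44x parts, e.g.
 *
 *     SET_DMA_PW(PW_32) == (2 << 26)   when DMA_CR_OFFSET == 0 (40x)
 *     SET_DMA_PW(PW_32) == (2 << 25)   when DMA_CR_OFFSET == 1 (44x)
 *
 * so the same driver code composes a correct control word on either family.
 */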

#define DMA_CE_ENABLE (1<<31) /* DMA Channel Enable */
#define SET_DMA_CE_ENABLE(x) (((x)&0x1)<<31)
#define GET_DMA_CE_ENABLE(x) (((x)&DMA_CE_ENABLE)>>31)

#define DMA_CIE_ENABLE (1<<30) /* DMA Channel Interrupt Enable */
#define SET_DMA_CIE_ENABLE(x) (((x)&0x1)<<30)
#define GET_DMA_CIE_ENABLE(x) (((x)&DMA_CIE_ENABLE)>>30)

#define DMA_TD (1<<29) /* Transfer Direction */
#define SET_DMA_TD(x) (((x)&0x1)<<29)
#define GET_DMA_TD(x) (((x)&DMA_TD)>>29)

#define DMA_PL (1<<28) /* Peripheral Location */
#define SET_DMA_PL(x) (((x)&0x1)<<28)
#define GET_DMA_PL(x) (((x)&DMA_PL)>>28)

#define EXTERNAL_PERIPHERAL 0
#define INTERNAL_PERIPHERAL 1

#define SET_DMA_PW(x) (((x)&0x3)<<(26-DMA_CR_OFFSET)) /* Peripheral Width */
#define DMA_PW_MASK SET_DMA_PW(3)
#define PW_8 0
#define PW_16 1
#define PW_32 2
#define PW_64 3
/* FIXME: Add PW_128 support for 440GP DMA block */
#define GET_DMA_PW(x) (((x)&DMA_PW_MASK)>>(26-DMA_CR_OFFSET))

#define DMA_DAI (1<<(25-DMA_CR_OFFSET)) /* Destination Address Increment */
#define SET_DMA_DAI(x) (((x)&0x1)<<(25-DMA_CR_OFFSET))

#define DMA_SAI (1<<(24-DMA_CR_OFFSET)) /* Source Address Increment */
#define SET_DMA_SAI(x) (((x)&0x1)<<(24-DMA_CR_OFFSET))

#define DMA_BEN (1<<(23-DMA_CR_OFFSET)) /* Buffer Enable */
#define SET_DMA_BEN(x) (((x)&0x1)<<(23-DMA_CR_OFFSET))

#define SET_DMA_TM(x) (((x)&0x3)<<(21-DMA_CR_OFFSET)) /* Transfer Mode */
#define DMA_TM_MASK SET_DMA_TM(3)
#define TM_PERIPHERAL 0 /* Peripheral */
#define TM_RESERVED 1 /* Reserved */
#define TM_S_MM 2 /* Memory to Memory */
#define TM_D_MM 3 /* Device Paced Memory to Memory */
#define GET_DMA_TM(x) (((x)&DMA_TM_MASK)>>(21-DMA_CR_OFFSET))

#define SET_DMA_PSC(x) (((x)&0x3)<<(19-DMA_CR_OFFSET)) /* Peripheral Setup Cycles */
#define DMA_PSC_MASK SET_DMA_PSC(3)
#define GET_DMA_PSC(x) (((x)&DMA_PSC_MASK)>>(19-DMA_CR_OFFSET))

#define SET_DMA_PWC(x) (((x)&0x3F)<<(13-DMA_CR_OFFSET)) /* Peripheral Wait Cycles */
#define DMA_PWC_MASK SET_DMA_PWC(0x3F)
#define GET_DMA_PWC(x) (((x)&DMA_PWC_MASK)>>(13-DMA_CR_OFFSET))

#define SET_DMA_PHC(x) (((x)&0x7)<<(10-DMA_CR_OFFSET)) /* Peripheral Hold Cycles */
#define DMA_PHC_MASK SET_DMA_PHC(0x7)
#define GET_DMA_PHC(x) (((x)&DMA_PHC_MASK)>>(10-DMA_CR_OFFSET))

#define DMA_ETD_OUTPUT (1<<(9-DMA_CR_OFFSET)) /* EOT pin is a TC output */
#define SET_DMA_ETD(x) (((x)&0x1)<<(9-DMA_CR_OFFSET))

#define DMA_TCE_ENABLE (1<<(8-DMA_CR_OFFSET)) /* Terminal Count Enable */
#define SET_DMA_TCE(x) (((x)&0x1)<<(8-DMA_CR_OFFSET))

#define DMA_DEC (1<<(2)) /* Address Decrement */
#define SET_DMA_DEC(x) (((x)&0x1)<<2)
#define GET_DMA_DEC(x) (((x)&DMA_DEC)>>2)


/*
 * Transfer Modes
 * These modes are defined in a way that makes it possible to
 * simply "or" in the value in the control register.
 */

#define DMA_MODE_MM (SET_DMA_TM(TM_S_MM)) /* memory to memory */

/* Device-paced memory to memory, device is at source address */
#define DMA_MODE_MM_DEVATSRC (DMA_TD | SET_DMA_TM(TM_D_MM))

/* Device-paced memory to memory, device is at destination address */
#define DMA_MODE_MM_DEVATDST (SET_DMA_TM(TM_D_MM))
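
/*
 * Usage sketch (illustrative only): because the modes above are already
 * positioned SET_DMA_TM() values, a driver can simply OR one into the
 * channel control word, or hand it to ppc4xx_set_dma_mode() declared at
 * the bottom of this file, e.g.
 *
 *     p_dma_ch->mode = DMA_MODE_MM;                      // pure mem-to-mem
 *     ppc4xx_set_dma_mode(dmanr, DMA_MODE_MM_DEVATSRC);  // device paced, device at source
 *
 * (dmanr and p_dma_ch are hypothetical locals; error checking omitted.)
 */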

/* 405gp/440gp */
#define SET_DMA_PREFETCH(x) (((x)&0x3)<<(4-DMA_CR_OFFSET)) /* Memory Read Prefetch */
#define DMA_PREFETCH_MASK SET_DMA_PREFETCH(3)
#define PREFETCH_1 0 /* Prefetch 1 Double Word */
#define PREFETCH_2 1
#define PREFETCH_4 2
#define GET_DMA_PREFETCH(x) (((x)&DMA_PREFETCH_MASK)>>(4-DMA_CR_OFFSET))

#define DMA_PCE (1<<(3-DMA_CR_OFFSET)) /* Parity Check Enable */
#define SET_DMA_PCE(x) (((x)&0x1)<<(3-DMA_CR_OFFSET))
#define GET_DMA_PCE(x) (((x)&DMA_PCE)>>(3-DMA_CR_OFFSET))

/* stb3x */

#define DMA_ECE_ENABLE (1<<5)
#define SET_DMA_ECE(x) (((x)&0x1)<<5)
#define GET_DMA_ECE(x) (((x)&DMA_ECE_ENABLE)>>5)

#define DMA_TCD_DISABLE (1<<4)
#define SET_DMA_TCD(x) (((x)&0x1)<<4)
#define GET_DMA_TCD(x) (((x)&DMA_TCD_DISABLE)>>4)

typedef uint32_t sgl_handle_t;

#ifdef CONFIG_PPC4xx_EDMA

#define SGL_LIST_SIZE 4096
#define DMA_PPC4xx_SIZE SGL_LIST_SIZE

#define SET_DMA_PRIORITY(x) (((x)&0x3)<<(6-DMA_CR_OFFSET)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK SET_DMA_PRIORITY(3)
#define PRIORITY_LOW 0
#define PRIORITY_MID_LOW 1
#define PRIORITY_MID_HIGH 2
#define PRIORITY_HIGH 3
#define GET_DMA_PRIORITY(x) (((x)&DMA_PRIORITY_MASK)>>(6-DMA_CR_OFFSET))

/*
 * DMA Polarity Configuration Register
 */
#define DMAReq_ActiveLow(chan) (1<<(31-(chan*3)))
#define DMAAck_ActiveLow(chan) (1<<(30-(chan*3)))
#define EOT_ActiveLow(chan) (1<<(29-(chan*3))) /* End of Transfer */

/*
 * DMA Sleep Mode Register
 */
#define SLEEP_MODE_ENABLE (1<<21)

/*
 * DMA Status Register
 */
#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_IN_DMA_REQ0 (1<<19) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<18)
#define DMA_IN_DMA_REQ2 (1<<17)
#define DMA_IN_DMA_REQ3 (1<<16)

#define DMA_EXT_DMA_REQ0 (1<<15) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<14)
#define DMA_EXT_DMA_REQ2 (1<<13)
#define DMA_EXT_DMA_REQ3 (1<<12)

#define DMA_CH0_BUSY (1<<11) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<10)
#define DMA_CH2_BUSY (1<<9)
#define DMA_CH3_BUSY (1<<8)

#define DMA_SG0 (1<<7) /* DMA Channel 0 Scatter/Gather in progress */
#define DMA_SG1 (1<<6)
#define DMA_SG2 (1<<5)
#define DMA_SG3 (1<<4)

/* DMA Channel Count Register */
#define DMA_CTC_BTEN (1<<23) /* Burst Enable/Disable bit */
#define DMA_CTC_BSIZ_MSK (3<<21) /* Mask of the Burst size bits */
#define DMA_CTC_BSIZ_2 (0)
#define DMA_CTC_BSIZ_4 (1<<21)
#define DMA_CTC_BSIZ_8 (2<<21)
#define DMA_CTC_BSIZ_16 (3<<21)
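
/*
 * Sketch (assumption, not taken from the driver source): the burst helpers
 * declared at the bottom of this file appear to take these CTC encodings
 * directly, e.g.
 *
 *     ppc4xx_enable_burst(dmanr);
 *     ppc4xx_set_burst_size(dmanr, DMA_CTC_BSIZ_16);
 *
 * Check ppc4xx_dma.c before relying on the exact argument encoding.
 */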

/*
 * DMA SG Command Register
 */
#define SSG_ENABLE(chan) (1<<(31-chan)) /* Start Scatter Gather */
#define SSG_MASK_ENABLE(chan) (1<<(15-chan)) /* Enable writing to SSG0 bit */

/*
 * DMA Scatter/Gather Descriptor Bit fields
 */
#define SG_LINK (1<<31) /* Link */
#define SG_TCI_ENABLE (1<<29) /* Enable Terminal Count Interrupt */
#define SG_ETI_ENABLE (1<<28) /* Enable End of Transfer Interrupt */
#define SG_ERI_ENABLE (1<<27) /* Enable Error Interrupt */
#define SG_COUNT_MASK 0xFFFF /* Count Field */

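/*
 * Note: SET_DMA_CONTROL (this CONFIG_PPC4xx_EDMA variant and the
 * CONFIG_STBXXX_DMA variant further down) is not a function-like macro; it
 * expands against a local "ppc_dma_ch_t *p_init" that must be in scope at
 * the point of use.
 */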
#define SET_DMA_CONTROL \
 (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
 SET_DMA_BEN(p_init->buffer_enable) | /* buffer enable */ \
 SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
 SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
 SET_DMA_PL(p_init->pl) | /* peripheral location */ \
 SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
 SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
 SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
 SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
 SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
 SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
 SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
 SET_DMA_PREFETCH(p_init->pf) /* read prefetch */)

#define GET_DMA_POLARITY(chan) (DMAReq_ActiveLow(chan) | DMAAck_ActiveLow(chan) | EOT_ActiveLow(chan))

#elif defined(CONFIG_STBXXX_DMA) /* stb03xxx */

#define DMA_PPC4xx_SIZE 4096

/*
 * DMA Status Register
 */

#define SET_DMA_PRIORITY(x) (((x)&0x00800001)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK 0x00800001
#define PRIORITY_LOW 0x00000000
#define PRIORITY_MID_LOW 0x00000001
#define PRIORITY_MID_HIGH 0x00800000
#define PRIORITY_HIGH 0x00800001
#define GET_DMA_PRIORITY(x) (((((x)&DMA_PRIORITY_MASK)&0x00800000) >> 22) | (((x)&DMA_PRIORITY_MASK)&0x00000001))
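
/*
 * Worked example: GET_DMA_PRIORITY() folds the split priority field back
 * into a small integer, so PRIORITY_LOW, PRIORITY_MID_LOW, PRIORITY_MID_HIGH
 * and PRIORITY_HIGH read back as 0, 1, 2 and 3 respectively
 * (0x00800000 >> 22 == 2, plus the low-order bit).
 */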

#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_CT0 (1<<19) /* Chained transfer */

#define DMA_IN_DMA_REQ0 (1<<18) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<17)
#define DMA_IN_DMA_REQ2 (1<<16)
#define DMA_IN_DMA_REQ3 (1<<15)

#define DMA_EXT_DMA_REQ0 (1<<14) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<13)
#define DMA_EXT_DMA_REQ2 (1<<12)
#define DMA_EXT_DMA_REQ3 (1<<11)

#define DMA_CH0_BUSY (1<<10) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<9)
#define DMA_CH2_BUSY (1<<8)
#define DMA_CH3_BUSY (1<<7)

#define DMA_CT1 (1<<6) /* Chained transfer */
#define DMA_CT2 (1<<5)
#define DMA_CT3 (1<<4)

#define DMA_CH_ENABLE (1<<7)
#define SET_DMA_CH(x) (((x)&0x1)<<7)
#define GET_DMA_CH(x) (((x)&DMA_CH_ENABLE)>>7)

/* STBx25xx dma unique */
/* Enable a device port on a dma channel,
 * e.g. external request 0 on dma channel 1.
 */

#define SSP0_RECV 15
#define SSP0_XMIT 14
#define EXT_DMA_0 12
#define SC1_XMIT 11
#define SC1_RECV 10
#define EXT_DMA_2 9
#define EXT_DMA_3 8
#define SERIAL2_XMIT 7
#define SERIAL2_RECV 6
#define SC0_XMIT 5
#define SC0_RECV 4
#define SERIAL1_XMIT 3
#define SERIAL1_RECV 2
#define SERIAL0_XMIT 1
#define SERIAL0_RECV 0

#define DMA_CHAN_0 1
#define DMA_CHAN_1 2
#define DMA_CHAN_2 3
#define DMA_CHAN_3 4
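
/*
 * Sketch (argument semantics assumed here, not verified against the driver):
 * for the "ext 0 on dma 1" example mentioned above, the mapping call would
 * look something like
 *
 *     ppc4xx_map_dma_port(dmanr, EXT_DMA_0, DMA_CHAN_1);
 *
 * with a matching ppc4xx_disable_dma_port() call to undo the mapping.
 */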

/* end STBx25xx */

/*
 * Bit 30 must be one for Redwoods, otherwise transfers may receive errors.
 */
#define DMA_CR_MB0 0x2

#define SET_DMA_CONTROL \
 (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
 SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
 SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
 SET_DMA_PL(p_init->pl) | /* peripheral location */ \
 SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
 SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
 SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
 SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
 SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
 SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
 SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
 SET_DMA_TCD(p_init->tcd_disable) | /* TC chain mode disable */ \
 SET_DMA_ECE(p_init->ece_enable) | /* ECE chain mode enable */ \
 SET_DMA_CH(p_init->ch_enable) | /* Chain enable */ \
 DMA_CR_MB0 /* must be one */)

#define GET_DMA_POLARITY(chan) chan

#endif

typedef struct {
        unsigned short in_use;  /* set when channel is being used, clr
                                 * when available */
        /*
         * Valid polarity settings:
         *   DMAReq_ActiveLow(n)
         *   DMAAck_ActiveLow(n)
         *   EOT_ActiveLow(n)
         *
         *   n is 0 to max dma chans
         */
        unsigned int polarity;

        char buffer_enable;     /* Boolean: buffer enable */
        char tce_enable;        /* Boolean: terminal count enable */
        char etd_output;        /* Boolean: eot pin is a tc output */
        char pce;               /* Boolean: parity check enable */

        /*
         * Peripheral location:
         *   INTERNAL_PERIPHERAL (UART0 on the 405GP)
         *   EXTERNAL_PERIPHERAL
         */
        char pl;                /* internal/external peripheral */

        /*
         * Valid pwidth settings:
         *   PW_8
         *   PW_16
         *   PW_32
         *   PW_64
         */
        unsigned int pwidth;

        char dai;               /* Boolean: dst address increment */
        char sai;               /* Boolean: src address increment */

        /*
         * Valid psc settings: 0-3
         */
        unsigned int psc;       /* Peripheral Setup Cycles */

        /*
         * Valid pwc settings: 0-63
         */
        unsigned int pwc;       /* Peripheral Wait Cycles */

        /*
         * Valid phc settings: 0-7
         */
        unsigned int phc;       /* Peripheral Hold Cycles */

        /*
         * Valid cp (channel priority) settings:
         *   PRIORITY_LOW
         *   PRIORITY_MID_LOW
         *   PRIORITY_MID_HIGH
         *   PRIORITY_HIGH
         */
        unsigned int cp;        /* channel priority */

        /*
         * Valid pf (memory read prefetch) settings:
         *   PREFETCH_1
         *   PREFETCH_2
         *   PREFETCH_4
         */
        unsigned int pf;        /* memory read prefetch */

        /*
         * Boolean: channel interrupt enable
         * NOTE: for sgl transfers, only the last descriptor will be setup to
         * interrupt.
         */
        char int_enable;

        char shift;             /* easy access to byte_count shift, based on
                                 * the width of the channel */

        uint32_t control;       /* channel control word */

        /* These variables are used ONLY in single dma transfers */
        unsigned int mode;      /* transfer mode */
        phys_addr_t addr;
        char ce;                /* channel enable */
#ifdef CONFIG_STB03xxx
        char ch_enable;
        char tcd_disable;
        char ece_enable;
        char td;                /* transfer direction */
#endif

        char int_on_final_sg;   /* for scatter/gather - only interrupt on last sg */
} ppc_dma_ch_t;
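
/*
 * Usage sketch (illustrative only, error handling abbreviated): a driver
 * typically zeroes a ppc_dma_ch_t, fills in the fields it cares about and
 * registers it with ppc4xx_init_dma_channel(), e.g.
 *
 *     ppc_dma_ch_t cfg;
 *
 *     memset(&cfg, 0, sizeof(cfg));
 *     cfg.mode       = DMA_MODE_MM;   // memory to memory
 *     cfg.pwidth     = PW_32;
 *     cfg.dai        = 1;             // increment both addresses
 *     cfg.sai        = 1;
 *     cfg.int_enable = 0;
 *     if (ppc4xx_init_dma_channel(dmanr, &cfg) != DMA_STATUS_GOOD)
 *             return -EBUSY;          // or other error handling
 *
 * The field names come from the structure above; "dmanr" is a hypothetical
 * channel number supplied by the caller.
 */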

/*
 * PPC44x DMA implementations have a slightly different
 * descriptor layout. Probably moved about due to the
 * change to 64-bit addresses and link pointer. I don't
 * know why they didn't just leave control_count after
 * the dst_addr.
 */
#ifdef PPC4xx_DMA_64BIT
typedef struct {
        uint32_t control;
        uint32_t control_count;
        phys_addr_t src_addr;
        phys_addr_t dst_addr;
        phys_addr_t next;
} ppc_sgl_t;
#else
typedef struct {
        uint32_t control;
        phys_addr_t src_addr;
        phys_addr_t dst_addr;
        uint32_t control_count;
        uint32_t next;
} ppc_sgl_t;
#endif

typedef struct {
        unsigned int dmanr;
        uint32_t control;       /* channel ctrl word; loaded from each descriptor */
        uint32_t sgl_control;   /* LK, TCI, ETI, and ERI bits in sgl descriptor */
        dma_addr_t dma_addr;    /* dma (physical) address of this list */
        ppc_sgl_t *phead;
        dma_addr_t phead_dma;
        ppc_sgl_t *ptail;
        dma_addr_t ptail_dma;
} sgl_list_info_t;

typedef struct {
        phys_addr_t *src_addr;
        phys_addr_t *dst_addr;
        phys_addr_t dma_src_addr;
        phys_addr_t dma_dst_addr;
} pci_alloc_desc_t;

extern ppc_dma_ch_t dma_channels[];

/*
 * The DMA API is implemented in ppc4xx_dma.c and ppc4xx_sgdma.c.
 */
extern int ppc4xx_init_dma_channel(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_get_channel_config(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_set_channel_priority(unsigned int, unsigned int);
extern unsigned int ppc4xx_get_peripheral_width(unsigned int);
extern void ppc4xx_set_sg_addr(int, phys_addr_t);
extern int ppc4xx_add_dma_sgl(sgl_handle_t, phys_addr_t, phys_addr_t, unsigned int);
extern void ppc4xx_enable_dma_sgl(sgl_handle_t);
extern void ppc4xx_disable_dma_sgl(sgl_handle_t);
extern int ppc4xx_get_dma_sgl_residue(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_delete_dma_sgl_element(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
extern void ppc4xx_free_dma_handle(sgl_handle_t);
extern int ppc4xx_get_dma_status(void);
extern int ppc4xx_enable_burst(unsigned int);
extern int ppc4xx_disable_burst(unsigned int);
extern int ppc4xx_set_burst_size(unsigned int, unsigned int);
extern void ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr);
extern void ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr);
extern void ppc4xx_enable_dma(unsigned int dmanr);
extern void ppc4xx_disable_dma(unsigned int dmanr);
extern void ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count);
extern int ppc4xx_get_dma_residue(unsigned int dmanr);
extern void ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
        phys_addr_t dst_dma_addr);
extern int ppc4xx_enable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_disable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_clr_dma_status(unsigned int dmanr);
extern int ppc4xx_map_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_disable_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode);
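
/*
 * Scatter/gather usage sketch (illustrative; the meaning of the
 * ppc4xx_alloc_dma_handle() arguments is assumed here, check
 * ppc4xx_sgdma.c): build a list, kick it off, then release the handle.
 *
 *     sgl_handle_t h;
 *
 *     if (ppc4xx_alloc_dma_handle(&h, DMA_MODE_MM, dmanr) != DMA_STATUS_GOOD)
 *             return -ENOMEM;                            // or other error handling
 *     ppc4xx_add_dma_sgl(h, src_phys, dst_phys, nbytes); // repeat per segment
 *     ppc4xx_enable_dma_sgl(h);
 *     ...
 *     ppc4xx_free_dma_handle(h);
 *
 * src_phys, dst_phys, nbytes and dmanr are hypothetical values supplied by
 * the caller.
 */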

/* These are in kernel/dma.c: */

/* reserve a DMA channel */
extern int request_dma(unsigned int dmanr, const char *device_id);
/* release it again */
extern void free_dma(unsigned int dmanr);
#endif
#endif /* __KERNEL__ */