#ifndef _ASM_DMA_MAPPING_H_
#define _ASM_DMA_MAPPING_H_

/*
 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
 * documentation.
 */

#include <linux/scatterlist.h>

#include <asm/io.h>
#include <asm/swiotlb.h>

extern dma_addr_t bad_dma_address;
struct dma_mapping_ops {
	int		(*mapping_error)(dma_addr_t dma_addr);
	void		*(*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp);
	void		(*free_coherent)(struct device *dev, size_t size,
				void *vaddr, dma_addr_t dma_handle);
	dma_addr_t	(*map_single)(struct device *hwdev, phys_addr_t ptr,
				size_t size, int direction);
	/* like map_single, but doesn't check the device mask */
	dma_addr_t	(*map_simple)(struct device *hwdev, phys_addr_t ptr,
				size_t size, int direction);
	void		(*unmap_single)(struct device *dev, dma_addr_t addr,
				size_t size, int direction);
	void		(*sync_single_for_cpu)(struct device *hwdev,
				dma_addr_t dma_handle, size_t size,
				int direction);
	void		(*sync_single_for_device)(struct device *hwdev,
				dma_addr_t dma_handle, size_t size,
				int direction);
	void		(*sync_single_range_for_cpu)(struct device *hwdev,
				dma_addr_t dma_handle, unsigned long offset,
				size_t size, int direction);
	void		(*sync_single_range_for_device)(struct device *hwdev,
				dma_addr_t dma_handle, unsigned long offset,
				size_t size, int direction);
	void		(*sync_sg_for_cpu)(struct device *hwdev,
				struct scatterlist *sg, int nelems,
				int direction);
	void		(*sync_sg_for_device)(struct device *hwdev,
				struct scatterlist *sg, int nelems,
				int direction);
	int		(*map_sg)(struct device *hwdev, struct scatterlist *sg,
				int nents, int direction);
	void		(*unmap_sg)(struct device *hwdev,
				struct scatterlist *sg, int nents,
				int direction);
	int		(*dma_supported)(struct device *hwdev, u64 mask);
};

extern const struct dma_mapping_ops *dma_ops;
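
/*
 * Each IOMMU backend (nommu, swiotlb, GART, Calgary, ...) fills in one of
 * these tables and points dma_ops at it during boot.  A minimal sketch of
 * what that registration looks like; "example_dma_ops" and its handlers
 * are illustrative placeholders, not a real backend:
 *
 *	static const struct dma_mapping_ops example_dma_ops = {
 *		.mapping_error	= example_mapping_error,
 *		.map_single	= example_map_single,
 *		.unmap_single	= example_unmap_single,
 *		.map_sg		= example_map_sg,
 *		.unmap_sg	= example_unmap_sg,
 *	};
 *
 *	dma_ops = &example_dma_ops;
 */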
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

void *dma_alloc_coherent(struct device *dev, size_t size,
			 dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
		       void *vaddr, dma_addr_t dma_handle);
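
/*
 * Typical use: allocate a coherent buffer at probe time, hand dma_handle
 * to the device, and free it at remove time.  Illustrative sketch only;
 * "dev", "vaddr" and "bus" are placeholders:
 *
 *	dma_addr_t bus;
 *	void *vaddr = dma_alloc_coherent(dev, PAGE_SIZE, &bus, GFP_KERNEL);
 *	if (!vaddr)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(dev, PAGE_SIZE, vaddr, bus);
 */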
extern int dma_supported(struct device *hwdev, u64 mask);
extern int dma_set_mask(struct device *dev, u64 mask);
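
/*
 * Drivers are expected to negotiate their addressing capability before
 * mapping anything.  A sketch of the usual probe-time check; "dev" is a
 * placeholder and DMA_32BIT_MASK comes from linux/dma-mapping.h:
 *
 *	if (dma_set_mask(dev, DMA_32BIT_MASK))
 *		return -EIO;	// device can't reach 32-bit addresses
 */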
#ifdef CONFIG_X86_32
# include "dma-mapping_32.h"
#else
# include "dma-mapping_64.h"
#endif
static inline dma_addr_t
dma_map_single(struct device *hwdev, void *ptr, size_t size,
	       int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_single(hwdev, virt_to_phys(ptr), size, direction);
}
static inline void
dma_unmap_single(struct device *dev, dma_addr_t addr, size_t size,
		 int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->unmap_single)
		dma_ops->unmap_single(dev, addr, size, direction);
}
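
/*
 * Streaming mappings pair map/unmap around each I/O and must be checked
 * for failure.  Illustrative sketch only; "dev", "buf" and "len" are
 * placeholders, and dma_mapping_error() is provided by the 32/64-bit
 * sub-headers included above:
 *
 *	dma_addr_t bus = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(bus))
 *		return -EIO;
 *	... point the device at "bus" and start the transfer ...
 *	dma_unmap_single(dev, bus, len, DMA_TO_DEVICE);
 */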
static inline int
dma_map_sg(struct device *hwdev, struct scatterlist *sg,
	   int nents, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_sg(hwdev, sg, nents, direction);
}
static inline void
dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
	     int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->unmap_sg)
		dma_ops->unmap_sg(hwdev, sg, nents, direction);
}
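
/*
 * dma_map_sg() may coalesce entries and returns the number of mappings
 * actually created (0 on failure); iterate over that count, not over the
 * original nents.  Sketch only; "dev", "sglist" and "nents" are
 * placeholders, program_hw() stands in for device-specific setup, and
 * for_each_sg/sg_dma_address/sg_dma_len come from linux/scatterlist.h:
 *
 *	struct scatterlist *s;
 *	int i, count = dma_map_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 *	if (!count)
 *		return -EIO;
 *	for_each_sg(sglist, s, count, i)
 *		program_hw(i, sg_dma_address(s), sg_dma_len(s));
 *	...
 *	dma_unmap_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 */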
static inline void
dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
			size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_for_cpu)
		dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
					     direction);
	flush_write_buffers();
}
static inline void
dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
			   size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_for_device)
		dma_ops->sync_single_for_device(hwdev, dma_handle, size,
						direction);
	flush_write_buffers();
}
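
/*
 * The sync calls transfer "ownership" of a still-mapped buffer between
 * CPU and device without a full unmap/remap.  Sketch of the handshake for
 * a buffer the device writes and the CPU then reads; "dev", "bus", "buf"
 * and "len" are placeholders and inspect() stands in for the CPU access:
 *
 *	dma_sync_single_for_cpu(dev, bus, len, DMA_FROM_DEVICE);
 *	inspect(buf);			// CPU may now touch the data
 *	dma_sync_single_for_device(dev, bus, len, DMA_FROM_DEVICE);
 *					// device owns the buffer again
 */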
static inline void
dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
			      unsigned long offset, size_t size, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_range_for_cpu)
		dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset,
						   size, direction);

	flush_write_buffers();
}
static inline void
dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
				 unsigned long offset, size_t size,
				 int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_single_range_for_device)
		dma_ops->sync_single_range_for_device(hwdev, dma_handle,
						      offset, size, direction);

	flush_write_buffers();
}
static inline void
dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
		    int nelems, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_sg_for_cpu)
		dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);

	flush_write_buffers();
}
static inline void
dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
		       int nelems, int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	if (dma_ops->sync_sg_for_device)
		dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);

	flush_write_buffers();
}
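
/*
 * The scatter-gather sync variants follow the same ownership handshake as
 * the single-buffer ones, but nelems must be the original entry count
 * passed to dma_map_sg(), not the (possibly smaller) value it returned.
 * Sketch only, placeholders as above:
 *
 *	dma_sync_sg_for_cpu(dev, sglist, nents, DMA_FROM_DEVICE);
 *	... CPU reads the buffers ...
 *	dma_sync_sg_for_device(dev, sglist, nents, DMA_FROM_DEVICE);
 */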
static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
				      size_t offset, size_t size,
				      int direction)
{
	BUG_ON(!valid_dma_direction(direction));
	return dma_ops->map_single(dev, page_to_phys(page) + offset,
				   size, direction);
}
static inline void dma_unmap_page(struct device *dev, dma_addr_t addr,
				  size_t size, int direction)
{
	dma_unmap_single(dev, addr, size, direction);
}
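
/*
 * dma_map_page() is the highmem-safe cousin of dma_map_single(): it takes
 * a page/offset pair instead of a kernel virtual address.  Sketch only;
 * "dev" and "page" are placeholders:
 *
 *	dma_addr_t bus = dma_map_page(dev, page, 0, PAGE_SIZE,
 *				      DMA_BIDIRECTIONAL);
 *	...
 *	dma_unmap_page(dev, bus, PAGE_SIZE, DMA_BIDIRECTIONAL);
 */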
static inline void
dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction dir)
{
	/* x86 is cache-coherent; flushing the CPU write buffers suffices. */
	flush_write_buffers();
}

#endif /* _ASM_DMA_MAPPING_H_ */