#ifndef _LINUX_DMA_MAPPING_H
#define _LINUX_DMA_MAPPING_H

#include <linux/device.h>
#include <linux/err.h>
#include <linux/dma-attrs.h>
#include <linux/dma-direction.h>
#include <linux/scatterlist.h>

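/*
 * struct dma_map_ops - architecture/IOMMU-specific DMA mapping operations.
 *
 * Each architecture (or IOMMU implementation) supplies one of these to
 * back the generic DMA API: coherent allocation and freeing, page and
 * scatter/gather mapping, CPU/device synchronisation, error checking and
 * DMA-mask handling.  is_phys is non-zero when DMA addresses are plain
 * physical addresses.
 */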
struct dma_map_ops {
	void* (*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp);
	void (*free_coherent)(struct device *dev, size_t size,
			      void *vaddr, dma_addr_t dma_handle);
	dma_addr_t (*map_page)(struct device *dev, struct page *page,
			       unsigned long offset, size_t size,
			       enum dma_data_direction dir,
			       struct dma_attrs *attrs);
	void (*unmap_page)(struct device *dev, dma_addr_t dma_handle,
			   size_t size, enum dma_data_direction dir,
			   struct dma_attrs *attrs);
	int (*map_sg)(struct device *dev, struct scatterlist *sg,
		      int nents, enum dma_data_direction dir,
		      struct dma_attrs *attrs);
	void (*unmap_sg)(struct device *dev,
			 struct scatterlist *sg, int nents,
			 enum dma_data_direction dir,
			 struct dma_attrs *attrs);
	void (*sync_single_for_cpu)(struct device *dev,
				    dma_addr_t dma_handle, size_t size,
				    enum dma_data_direction dir);
	void (*sync_single_for_device)(struct device *dev,
				       dma_addr_t dma_handle, size_t size,
				       enum dma_data_direction dir);
	void (*sync_sg_for_cpu)(struct device *dev,
				struct scatterlist *sg, int nents,
				enum dma_data_direction dir);
	void (*sync_sg_for_device)(struct device *dev,
				   struct scatterlist *sg, int nents,
				   enum dma_data_direction dir);
	int (*mapping_error)(struct device *dev, dma_addr_t dma_addr);
	int (*dma_supported)(struct device *dev, u64 mask);
	int (*set_dma_mask)(struct device *dev, u64 mask);
	int is_phys;
};

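/*
 * DMA_BIT_MASK(n) builds a mask covering the low n address bits, e.g.
 * DMA_BIT_MASK(32) == 0xffffffffULL.  The n == 64 case is special-cased
 * to avoid the undefined behaviour of shifting a 64-bit value by 64.
 */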
#define DMA_BIT_MASK(n)	(((n) == 64) ? ~0ULL : ((1ULL<<(n))-1))

#define DMA_MASK_NONE	0x0ULL

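/*
 * Sanity helpers: valid_dma_direction() accepts only the three defined
 * transfer directions, and is_device_dma_capable() reports whether the
 * device has a usable DMA mask at all.
 */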
static inline int valid_dma_direction(int dma_direction)
{
	return ((dma_direction == DMA_BIDIRECTIONAL) ||
		(dma_direction == DMA_TO_DEVICE) ||
		(dma_direction == DMA_FROM_DEVICE));
}

static inline int is_device_dma_capable(struct device *dev)
{
	return dev->dma_mask != NULL && *dev->dma_mask != DMA_MASK_NONE;
}

#ifdef CONFIG_HAS_DMA
#include <asm/dma-mapping.h>
#else
#include <asm-generic/dma-mapping-broken.h>
#endif

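/*
 * dma_get_mask() returns the device's streaming DMA mask, falling back to
 * a 32-bit mask when none has been set.
 */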
static inline u64 dma_get_mask(struct device *dev)
{
	if (dev && dev->dma_mask && *dev->dma_mask)
		return *dev->dma_mask;
	return DMA_BIT_MASK(32);
}

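/*
 * dma_set_coherent_mask() records the mask used for coherent (consistent)
 * allocations after validating it with dma_supported().  Typical driver
 * usage (illustrative only):
 *
 *	if (dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32)))
 *		return -EIO;
 */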
#ifdef ARCH_HAS_DMA_SET_COHERENT_MASK
int dma_set_coherent_mask(struct device *dev, u64 mask);
#else
static inline int dma_set_coherent_mask(struct device *dev, u64 mask)
{
	if (!dma_supported(dev, mask))
		return -EIO;
	dev->coherent_dma_mask = mask;
	return 0;
}
#endif

extern u64 dma_get_required_mask(struct device *dev);

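/*
 * The segment size and boundary helpers operate on dev->dma_parms, which
 * the bus code must have allocated: without it the setters fail with -EIO
 * and the getters fall back to a 64 KiB segment / 4 GiB boundary mask.
 */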
static inline unsigned int dma_get_max_seg_size(struct device *dev)
{
	return dev->dma_parms ? dev->dma_parms->max_segment_size : 65536;
}

static inline unsigned int dma_set_max_seg_size(struct device *dev,
						unsigned int size)
{
	if (dev->dma_parms) {
		dev->dma_parms->max_segment_size = size;
		return 0;
	} else
		return -EIO;
}

static inline unsigned long dma_get_seg_boundary(struct device *dev)
{
	return dev->dma_parms ?
		dev->dma_parms->segment_boundary_mask : 0xffffffff;
}

static inline int dma_set_seg_boundary(struct device *dev, unsigned long mask)
{
	if (dev->dma_parms) {
		dev->dma_parms->segment_boundary_mask = mask;
		return 0;
	} else
		return -EIO;
}

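/*
 * dma_get_cache_alignment() returns the minimum alignment, in bytes, that
 * DMA-safe buffers need on this architecture; 1 means no special
 * alignment is required.
 */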
#ifdef CONFIG_HAS_DMA
static inline int dma_get_cache_alignment(void)
{
#ifdef ARCH_DMA_MINALIGN
	return ARCH_DMA_MINALIGN;
#endif
	return 1;
}
#endif

/* flags for the coherent memory api */
#define	DMA_MEMORY_MAP			0x01
#define DMA_MEMORY_IO			0x02
#define DMA_MEMORY_INCLUDES_CHILDREN	0x04
#define DMA_MEMORY_EXCLUSIVE		0x08

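/*
 * When the architecture does not implement per-device coherent memory
 * declarations, these stubs make dma_declare_coherent_memory() report
 * failure (it returns 0, i.e. none of the DMA_MEMORY_* flags), so callers
 * fall back to the normal coherent allocator.
 */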
#ifndef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
static inline int
dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
			    dma_addr_t device_addr, size_t size, int flags)
{
	return 0;
}

static inline void
dma_release_declared_memory(struct device *dev)
{
}

static inline void *
dma_mark_declared_memory_occupied(struct device *dev,
				  dma_addr_t device_addr, size_t size)
{
	return ERR_PTR(-EBUSY);
}
#endif

/*
 * Managed DMA API
 */
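/*
 * The dmam_*() variants are device-managed (devres): memory obtained or
 * declared through them is released automatically when the driver
 * detaches, so no explicit free is needed on error or removal paths.
 */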
extern void *dmam_alloc_coherent(struct device *dev, size_t size,
				 dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
			       dma_addr_t dma_handle);
extern void *dmam_alloc_noncoherent(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t gfp);
extern void dmam_free_noncoherent(struct device *dev, size_t size, void *vaddr,
				  dma_addr_t dma_handle);
#ifdef ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
extern int dmam_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
					dma_addr_t device_addr, size_t size,
					int flags);
extern void dmam_release_declared_memory(struct device *dev);
#else /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */
static inline int dmam_declare_coherent_memory(struct device *dev,
				dma_addr_t bus_addr, dma_addr_t device_addr,
				size_t size, int flags)
{
	return 0;
}

static inline void dmam_release_declared_memory(struct device *dev)
{
}
#endif /* ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY */

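/*
 * Architectures without CONFIG_HAVE_DMA_ATTRS ignore DMA attributes: the
 * *_attrs() wrappers below simply drop the attrs argument and call the
 * plain mapping routines.
 */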
#ifndef CONFIG_HAVE_DMA_ATTRS
struct dma_attrs;

#define dma_map_single_attrs(dev, cpu_addr, size, dir, attrs) \
	dma_map_single(dev, cpu_addr, size, dir)

#define dma_unmap_single_attrs(dev, dma_addr, size, dir, attrs) \
	dma_unmap_single(dev, dma_addr, size, dir)

#define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_map_sg(dev, sgl, nents, dir)

#define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \
	dma_unmap_sg(dev, sgl, nents, dir)

#endif /* CONFIG_HAVE_DMA_ATTRS */

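/*
 * DMA unmap state macros: drivers that must remember a mapping's bus
 * address and length for a later unmap or sync declare the fields with
 * DEFINE_DMA_UNMAP_ADDR()/DEFINE_DMA_UNMAP_LEN(), so the storage compiles
 * away entirely when CONFIG_NEED_DMA_MAP_STATE is not set.  Illustrative
 * usage with a hypothetical driver structure:
 *
 *	struct ring_desc {
 *		DEFINE_DMA_UNMAP_ADDR(mapping);
 *		DEFINE_DMA_UNMAP_LEN(len);
 *	};
 *
 *	dma_unmap_addr_set(desc, mapping, dma_handle);
 *	dma_unmap_len_set(desc, len, size);
 *	...
 *	dma_unmap_single(dev, dma_unmap_addr(desc, mapping),
 *			 dma_unmap_len(desc, len), DMA_TO_DEVICE);
 */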
#ifdef CONFIG_NEED_DMA_MAP_STATE
#define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME)        dma_addr_t ADDR_NAME
#define DEFINE_DMA_UNMAP_LEN(LEN_NAME)          __u32 LEN_NAME
#define dma_unmap_addr(PTR, ADDR_NAME)           ((PTR)->ADDR_NAME)
#define dma_unmap_addr_set(PTR, ADDR_NAME, VAL)  (((PTR)->ADDR_NAME) = (VAL))
#define dma_unmap_len(PTR, LEN_NAME)             ((PTR)->LEN_NAME)
#define dma_unmap_len_set(PTR, LEN_NAME, VAL)    (((PTR)->LEN_NAME) = (VAL))
#else
#define DEFINE_DMA_UNMAP_ADDR(ADDR_NAME)
#define DEFINE_DMA_UNMAP_LEN(LEN_NAME)
#define dma_unmap_addr(PTR, ADDR_NAME)           (0)
#define dma_unmap_addr_set(PTR, ADDR_NAME, VAL)  do { } while (0)
#define dma_unmap_len(PTR, LEN_NAME)             (0)
#define dma_unmap_len_set(PTR, LEN_NAME, VAL)    do { } while (0)
#endif

#endif /* _LINUX_DMA_MAPPING_H */