#ifndef __ASM_SH_DMA_MAPPING_H
#define __ASM_SH_DMA_MAPPING_H

extern struct dma_map_ops *dma_ops;
extern void no_iommu_init(void);

/*
 * All devices on SH share a single, globally registered dma_map_ops
 * instance; the device argument is ignored.
 */
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return dma_ops;
}

#include <asm-generic/dma-coherent.h>
#include <asm-generic/dma-mapping-common.h>

/* Defer to the ops' dma_supported() hook if present; otherwise accept the mask. */
static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	if (ops->dma_supported)
		return ops->dma_supported(dev, mask);

	return 1;
}

/*
 * Validate the requested mask with dma_supported() before storing it,
 * and let the ops override the behaviour via set_dma_mask() when provided.
 */
static inline int dma_set_mask(struct device *dev, u64 mask)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;
	if (ops->set_dma_mask)
		return ops->set_dma_mask(dev, mask);

	*dev->dma_mask = mask;

	return 0;
}

void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
		    enum dma_data_direction dir);

#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

/*
 * Use the ops' mapping_error() hook when available; otherwise treat a
 * DMA address of 0 as a failed mapping.
 */
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	if (ops->mapping_error)
		return ops->mapping_error(dev, dma_addr);

	return dma_addr == 0;
}

static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t gfp)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *memory;

	/* Satisfy the request from a per-device coherent pool if one exists. */
	if (dma_alloc_from_coherent(dev, size, dma_handle, &memory))
		return memory;
	if (!ops->alloc_coherent)
		return NULL;

	memory = ops->alloc_coherent(dev, size, dma_handle, gfp);
	debug_dma_alloc_coherent(dev, size, *dma_handle, memory);

	return memory;
}

static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *vaddr, dma_addr_t dma_handle)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	/* Give per-device coherent memory back to its pool first. */
	if (dma_release_from_coherent(dev, get_order(size), vaddr))
		return;

	debug_dma_free_coherent(dev, size, vaddr, dma_handle);
	if (ops->free_coherent)
		ops->free_coherent(dev, size, vaddr, dma_handle);
}

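/*
 * Illustrative sketch only (not part of the original header): a driver
 * would typically pair the helpers above roughly like this, assuming a
 * hypothetical buffer of PAGE_SIZE bytes:
 *
 *	dma_addr_t handle;
 *	void *buf = dma_alloc_coherent(dev, PAGE_SIZE, &handle, GFP_KERNEL);
 *
 *	if (!buf)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(dev, PAGE_SIZE, buf, handle);
 */
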
/* arch/sh/mm/consistent.c */
extern void *dma_generic_alloc_coherent(struct device *dev, size_t size,
					dma_addr_t *dma_addr, gfp_t flag);
extern void dma_generic_free_coherent(struct device *dev, size_t size,
				      void *vaddr, dma_addr_t dma_handle);

#endif /* __ASM_SH_DMA_MAPPING_H */