/*
 * Copyright (C) 2006 Benjamin Herrenschmidt, IBM Corporation
 *
 * Provide default implementations of the DMA mapping callbacks for
 * busses using the iommu infrastructure
 */
 | 7 |  | 
| Paul Gortmaker | 66b15db | 2011-05-27 10:46:24 -0400 | [diff] [blame] | 8 | #include <linux/export.h> | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 9 | #include <asm/iommu.h> | 
 | 10 |  | 
/*
 * Generic iommu implementation
 */
 | 14 |  | 
 | 15 | /* Allocates a contiguous real buffer and creates mappings over it. | 
 | 16 |  * Returns the virtual address of the buffer and sets dma_handle | 
 | 17 |  * to the dma address (mapping) of the first page. | 
 | 18 |  */ | 
 | 19 | static void *dma_iommu_alloc_coherent(struct device *dev, size_t size, | 
 | 20 | 				      dma_addr_t *dma_handle, gfp_t flag) | 
 | 21 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 22 | 	return iommu_alloc_coherent(dev, get_iommu_table_base(dev), size, | 
| Nishanth Aravamudan | b3c7385 | 2010-10-18 07:27:04 +0000 | [diff] [blame] | 23 | 				    dma_handle, dev->coherent_dma_mask, flag, | 
| Becky Bruce | 8fae035 | 2008-09-08 09:09:54 +0000 | [diff] [blame] | 24 | 				    dev_to_node(dev)); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 25 | } | 
 | 26 |  | 
 | 27 | static void dma_iommu_free_coherent(struct device *dev, size_t size, | 
 | 28 | 				    void *vaddr, dma_addr_t dma_handle) | 
 | 29 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 30 | 	iommu_free_coherent(get_iommu_table_base(dev), size, vaddr, dma_handle); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 31 | } | 
 | 32 |  | 
 | 33 | /* Creates TCEs for a user provided buffer.  The user buffer must be | 
| Mark Nelson | f9226d5 | 2008-10-27 20:38:08 +0000 | [diff] [blame] | 34 |  * contiguous real kernel storage (not vmalloc).  The address passed here | 
 | 35 |  * comprises a page address and offset into that page. The dma_addr_t | 
 | 36 |  * returned will point to the same byte within the page as was passed in. | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 37 |  */ | 
| Mark Nelson | f9226d5 | 2008-10-27 20:38:08 +0000 | [diff] [blame] | 38 | static dma_addr_t dma_iommu_map_page(struct device *dev, struct page *page, | 
 | 39 | 				     unsigned long offset, size_t size, | 
 | 40 | 				     enum dma_data_direction direction, | 
 | 41 | 				     struct dma_attrs *attrs) | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 42 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 43 | 	return iommu_map_page(dev, get_iommu_table_base(dev), page, offset, | 
 | 44 | 			      size, device_to_mask(dev), direction, attrs); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 45 | } | 
 | 46 |  | 
 | 47 |  | 
| Mark Nelson | f9226d5 | 2008-10-27 20:38:08 +0000 | [diff] [blame] | 48 | static void dma_iommu_unmap_page(struct device *dev, dma_addr_t dma_handle, | 
 | 49 | 				 size_t size, enum dma_data_direction direction, | 
 | 50 | 				 struct dma_attrs *attrs) | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 51 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 52 | 	iommu_unmap_page(get_iommu_table_base(dev), dma_handle, size, direction, | 
| Mark Nelson | f9226d5 | 2008-10-27 20:38:08 +0000 | [diff] [blame] | 53 | 			 attrs); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 54 | } | 
 | 55 |  | 
 | 56 |  | 
 | 57 | static int dma_iommu_map_sg(struct device *dev, struct scatterlist *sglist, | 
 | 58 | 			    int nelems, enum dma_data_direction direction, | 
 | 59 | 			    struct dma_attrs *attrs) | 
 | 60 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 61 | 	return iommu_map_sg(dev, get_iommu_table_base(dev), sglist, nelems, | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 62 | 			    device_to_mask(dev), direction, attrs); | 
 | 63 | } | 
 | 64 |  | 
 | 65 | static void dma_iommu_unmap_sg(struct device *dev, struct scatterlist *sglist, | 
 | 66 | 		int nelems, enum dma_data_direction direction, | 
 | 67 | 		struct dma_attrs *attrs) | 
 | 68 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 69 | 	iommu_unmap_sg(get_iommu_table_base(dev), sglist, nelems, direction, | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 70 | 		       attrs); | 
 | 71 | } | 
 | 72 |  | 
 | 73 | /* We support DMA to/from any memory page via the iommu */ | 
 | 74 | static int dma_iommu_dma_supported(struct device *dev, u64 mask) | 
 | 75 | { | 
| Becky Bruce | 738ef42 | 2009-09-21 08:26:35 +0000 | [diff] [blame] | 76 | 	struct iommu_table *tbl = get_iommu_table_base(dev); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 77 |  | 
| Nishanth Aravamudan | 1cb8e85 | 2010-09-15 08:05:45 +0000 | [diff] [blame] | 78 | 	if (!tbl) { | 
 | 79 | 		dev_info(dev, "Warning: IOMMU dma not supported: mask 0x%08llx" | 
 | 80 | 			", table unavailable\n", mask); | 
 | 81 | 		return 0; | 
 | 82 | 	} | 
 | 83 |  | 
 | 84 | 	if ((tbl->it_offset + tbl->it_size) > (mask >> IOMMU_PAGE_SHIFT)) { | 
 | 85 | 		dev_info(dev, "Warning: IOMMU window too big for device mask\n"); | 
 | 86 | 		dev_info(dev, "mask: 0x%08llx, table end: 0x%08lx\n", | 
 | 87 | 				mask, (tbl->it_offset + tbl->it_size) << | 
 | 88 | 				IOMMU_PAGE_SHIFT); | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 89 | 		return 0; | 
 | 90 | 	} else | 
 | 91 | 		return 1; | 
 | 92 | } | 
 | 93 |  | 
| Milton Miller | d24f9c6 | 2011-06-24 09:05:24 +0000 | [diff] [blame] | 94 | static u64 dma_iommu_get_required_mask(struct device *dev) | 
| Milton Miller | 6a5c7be | 2011-06-24 09:05:22 +0000 | [diff] [blame] | 95 | { | 
 | 96 | 	struct iommu_table *tbl = get_iommu_table_base(dev); | 
 | 97 | 	u64 mask; | 
 | 98 | 	if (!tbl) | 
 | 99 | 		return 0; | 
 | 100 |  | 
 | 101 | 	mask = 1ULL < (fls_long(tbl->it_offset + tbl->it_size) - 1); | 
 | 102 | 	mask += mask - 1; | 
 | 103 |  | 
 | 104 | 	return mask; | 
 | 105 | } | 
 | 106 |  | 
| FUJITA Tomonori | 45223c5 | 2009-08-04 19:08:25 +0000 | [diff] [blame] | 107 | struct dma_map_ops dma_iommu_ops = { | 
| Milton Miller | 2eccacd | 2011-06-24 09:05:25 +0000 | [diff] [blame] | 108 | 	.alloc_coherent		= dma_iommu_alloc_coherent, | 
 | 109 | 	.free_coherent		= dma_iommu_free_coherent, | 
 | 110 | 	.map_sg			= dma_iommu_map_sg, | 
 | 111 | 	.unmap_sg		= dma_iommu_unmap_sg, | 
 | 112 | 	.dma_supported		= dma_iommu_dma_supported, | 
 | 113 | 	.map_page		= dma_iommu_map_page, | 
 | 114 | 	.unmap_page		= dma_iommu_unmap_page, | 
| Milton Miller | d24f9c6 | 2011-06-24 09:05:24 +0000 | [diff] [blame] | 115 | 	.get_required_mask	= dma_iommu_get_required_mask, | 
| Becky Bruce | 8dd0e95 | 2008-09-08 09:09:53 +0000 | [diff] [blame] | 116 | }; | 
 | 117 | EXPORT_SYMBOL(dma_iommu_ops); |