| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 1 | /* | 
|  | 2 | * DMA Engine test module | 
|  | 3 | * | 
|  | 4 | * Copyright (C) 2007 Atmel Corporation | 
|  | 5 | * | 
|  | 6 | * This program is free software; you can redistribute it and/or modify | 
|  | 7 | * it under the terms of the GNU General Public License version 2 as | 
|  | 8 | * published by the Free Software Foundation. | 
|  | 9 | */ | 
|  | 10 | #include <linux/delay.h> | 
| Alexey Dobriyan | b7f080c | 2011-06-16 11:01:34 +0000 | [diff] [blame] | 11 | #include <linux/dma-mapping.h> | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 12 | #include <linux/dmaengine.h> | 
| Guennadi Liakhovetski | 981ed70 | 2011-08-18 16:50:51 +0200 | [diff] [blame] | 13 | #include <linux/freezer.h> | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 14 | #include <linux/init.h> | 
|  | 15 | #include <linux/kthread.h> | 
|  | 16 | #include <linux/module.h> | 
|  | 17 | #include <linux/moduleparam.h> | 
|  | 18 | #include <linux/random.h> | 
| Tejun Heo | 5a0e3ad | 2010-03-24 17:04:11 +0900 | [diff] [blame] | 19 | #include <linux/slab.h> | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 20 | #include <linux/wait.h> | 
|  | 21 |  | 
/* Module parameters; all are read-only (S_IRUGO) via sysfs once loaded. */
static unsigned int test_buf_size = 16384;
module_param(test_buf_size, uint, S_IRUGO);
MODULE_PARM_DESC(test_buf_size, "Size of the memcpy test buffer");

/* Empty string (the default) matches any channel/device, see dmatest_match_*(). */
static char test_channel[20];
module_param_string(channel, test_channel, sizeof(test_channel), S_IRUGO);
MODULE_PARM_DESC(channel, "Bus ID of the channel to test (default: any)");

static char test_device[20];
module_param_string(device, test_device, sizeof(test_device), S_IRUGO);
MODULE_PARM_DESC(device, "Bus ID of the DMA Engine to test (default: any)");

static unsigned int threads_per_chan = 1;
module_param(threads_per_chan, uint, S_IRUGO);
MODULE_PARM_DESC(threads_per_chan,
		"Number of threads to start per channel (default: 1)");

/* 0 (the default) means "no limit": use every channel the filter accepts. */
static unsigned int max_channels;
module_param(max_channels, uint, S_IRUGO);
MODULE_PARM_DESC(max_channels,
		"Maximum number of channels to use (default: all)");

/* 0 (the default) means run until the thread is told to stop. */
static unsigned int iterations;
module_param(iterations, uint, S_IRUGO);
MODULE_PARM_DESC(iterations,
		"Iterations before stopping test (default: infinite)");

static unsigned int xor_sources = 3;
module_param(xor_sources, uint, S_IRUGO);
MODULE_PARM_DESC(xor_sources,
		"Number of xor source buffers (default: 3)");

static unsigned int pq_sources = 3;
module_param(pq_sources, uint, S_IRUGO);
MODULE_PARM_DESC(pq_sources,
		"Number of p+q source buffers (default: 3)");
|  | 58 |  | 
| Viresh Kumar | d42efe6 | 2011-03-22 17:27:25 +0530 | [diff] [blame] | 59 | static int timeout = 3000; | 
|  | 60 | module_param(timeout, uint, S_IRUGO); | 
| Joe Perches | 85ee7a1 | 2011-04-23 20:38:19 -0700 | [diff] [blame] | 61 | MODULE_PARM_DESC(timeout, "Transfer Timeout in msec (default: 3000), " | 
|  | 62 | "Pass -1 for infinite timeout"); | 
| Viresh Kumar | d42efe6 | 2011-03-22 17:27:25 +0530 | [diff] [blame] | 63 |  | 
/*
 * Initialization patterns. All bytes in the source buffer has bit 7
 * set, all bytes in the destination buffer has bit 7 cleared.
 *
 * Bit 6 is set for all bytes which are to be copied by the DMA
 * engine. Bit 5 is set for all bytes which are to be overwritten by
 * the DMA engine.
 *
 * The remaining bits are the inverse of a counter which increments by
 * one for each byte address.
 */
#define PATTERN_SRC		0x80	/* bit 7 set: byte lives in a source buffer */
#define PATTERN_DST		0x00	/* bit 7 clear: byte lives in a dest buffer */
#define PATTERN_COPY		0x40	/* bit 6: byte is inside the region to copy */
#define PATTERN_OVERWRITE	0x20	/* bit 5: byte will be overwritten by the DMA */
#define PATTERN_COUNT_MASK	0x1f	/* low 5 bits: inverted per-byte counter */
|  | 80 |  | 
/* Per-kthread test state; one instance per thread started on a channel. */
struct dmatest_thread {
	struct list_head	node;	/* entry in dmatest_chan.threads */
	struct task_struct	*task;	/* the kthread running dmatest_func() */
	struct dma_chan		*chan;	/* channel under test */
	u8			**srcs;	/* NULL-terminated array of source buffers */
	u8			**dsts;	/* NULL-terminated array of dest buffers */
	enum dma_transaction_type type;	/* DMA_MEMCPY, DMA_XOR or DMA_PQ */
};

/* Per-channel bookkeeping: the claimed channel plus its test threads. */
struct dmatest_chan {
	struct list_head	node;	/* entry in dmatest_channels */
	struct dma_chan		*chan;	/* the claimed DMA channel */
	struct list_head	threads;	/* list of struct dmatest_thread */
};
|  | 95 |  | 
/*
 * These are protected by dma_list_mutex since they're only used by
 * the DMA filter function callback
 */
static LIST_HEAD(dmatest_channels);	/* all dmatest_chan instances in use */
static unsigned int nr_channels;	/* number of entries on dmatest_channels */
|  | 102 |  | 
|  | 103 | static bool dmatest_match_channel(struct dma_chan *chan) | 
|  | 104 | { | 
|  | 105 | if (test_channel[0] == '\0') | 
|  | 106 | return true; | 
| Dan Williams | 41d5e59 | 2009-01-06 11:38:21 -0700 | [diff] [blame] | 107 | return strcmp(dma_chan_name(chan), test_channel) == 0; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 108 | } | 
|  | 109 |  | 
|  | 110 | static bool dmatest_match_device(struct dma_device *device) | 
|  | 111 | { | 
|  | 112 | if (test_device[0] == '\0') | 
|  | 113 | return true; | 
| Kay Sievers | 06190d8 | 2008-11-11 13:12:33 -0700 | [diff] [blame] | 114 | return strcmp(dev_name(device->dev), test_device) == 0; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 115 | } | 
|  | 116 |  | 
/* Return a full word of kernel-grade randomness for lengths/offsets. */
static unsigned long dmatest_random(void)
{
	unsigned long val;

	get_random_bytes(&val, sizeof(val));
	return val;
}
|  | 124 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 125 | static void dmatest_init_srcs(u8 **bufs, unsigned int start, unsigned int len) | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 126 | { | 
|  | 127 | unsigned int i; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 128 | u8 *buf; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 129 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 130 | for (; (buf = *bufs); bufs++) { | 
|  | 131 | for (i = 0; i < start; i++) | 
|  | 132 | buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK); | 
|  | 133 | for ( ; i < start + len; i++) | 
|  | 134 | buf[i] = PATTERN_SRC | PATTERN_COPY | 
| Joe Perches | c019894 | 2009-06-28 09:26:21 -0700 | [diff] [blame] | 135 | | (~i & PATTERN_COUNT_MASK); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 136 | for ( ; i < test_buf_size; i++) | 
|  | 137 | buf[i] = PATTERN_SRC | (~i & PATTERN_COUNT_MASK); | 
|  | 138 | buf++; | 
|  | 139 | } | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 140 | } | 
|  | 141 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 142 | static void dmatest_init_dsts(u8 **bufs, unsigned int start, unsigned int len) | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 143 | { | 
|  | 144 | unsigned int i; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 145 | u8 *buf; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 146 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 147 | for (; (buf = *bufs); bufs++) { | 
|  | 148 | for (i = 0; i < start; i++) | 
|  | 149 | buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK); | 
|  | 150 | for ( ; i < start + len; i++) | 
|  | 151 | buf[i] = PATTERN_DST | PATTERN_OVERWRITE | 
|  | 152 | | (~i & PATTERN_COUNT_MASK); | 
|  | 153 | for ( ; i < test_buf_size; i++) | 
|  | 154 | buf[i] = PATTERN_DST | (~i & PATTERN_COUNT_MASK); | 
|  | 155 | } | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 156 | } | 
|  | 157 |  | 
|  | 158 | static void dmatest_mismatch(u8 actual, u8 pattern, unsigned int index, | 
|  | 159 | unsigned int counter, bool is_srcbuf) | 
|  | 160 | { | 
|  | 161 | u8		diff = actual ^ pattern; | 
|  | 162 | u8		expected = pattern | (~counter & PATTERN_COUNT_MASK); | 
|  | 163 | const char	*thread_name = current->comm; | 
|  | 164 |  | 
|  | 165 | if (is_srcbuf) | 
|  | 166 | pr_warning("%s: srcbuf[0x%x] overwritten!" | 
|  | 167 | " Expected %02x, got %02x\n", | 
|  | 168 | thread_name, index, expected, actual); | 
|  | 169 | else if ((pattern & PATTERN_COPY) | 
|  | 170 | && (diff & (PATTERN_COPY | PATTERN_OVERWRITE))) | 
|  | 171 | pr_warning("%s: dstbuf[0x%x] not copied!" | 
|  | 172 | " Expected %02x, got %02x\n", | 
|  | 173 | thread_name, index, expected, actual); | 
|  | 174 | else if (diff & PATTERN_SRC) | 
|  | 175 | pr_warning("%s: dstbuf[0x%x] was copied!" | 
|  | 176 | " Expected %02x, got %02x\n", | 
|  | 177 | thread_name, index, expected, actual); | 
|  | 178 | else | 
|  | 179 | pr_warning("%s: dstbuf[0x%x] mismatch!" | 
|  | 180 | " Expected %02x, got %02x\n", | 
|  | 181 | thread_name, index, expected, actual); | 
|  | 182 | } | 
|  | 183 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 184 | static unsigned int dmatest_verify(u8 **bufs, unsigned int start, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 185 | unsigned int end, unsigned int counter, u8 pattern, | 
|  | 186 | bool is_srcbuf) | 
|  | 187 | { | 
|  | 188 | unsigned int i; | 
|  | 189 | unsigned int error_count = 0; | 
|  | 190 | u8 actual; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 191 | u8 expected; | 
|  | 192 | u8 *buf; | 
|  | 193 | unsigned int counter_orig = counter; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 194 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 195 | for (; (buf = *bufs); bufs++) { | 
|  | 196 | counter = counter_orig; | 
|  | 197 | for (i = start; i < end; i++) { | 
|  | 198 | actual = buf[i]; | 
|  | 199 | expected = pattern | (~counter & PATTERN_COUNT_MASK); | 
|  | 200 | if (actual != expected) { | 
|  | 201 | if (error_count < 32) | 
|  | 202 | dmatest_mismatch(actual, pattern, i, | 
|  | 203 | counter, is_srcbuf); | 
|  | 204 | error_count++; | 
|  | 205 | } | 
|  | 206 | counter++; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 207 | } | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 208 | } | 
|  | 209 |  | 
|  | 210 | if (error_count > 32) | 
|  | 211 | pr_warning("%s: %u errors suppressed\n", | 
|  | 212 | current->comm, error_count - 32); | 
|  | 213 |  | 
|  | 214 | return error_count; | 
|  | 215 | } | 
|  | 216 |  | 
/* poor man's completion - we want to use wait_event_freezable() on it */
struct dmatest_done {
	bool			done;	/* set true by dmatest_callback() */
	wait_queue_head_t	*wait;	/* woken when the transfer completes */
};
|  | 222 |  | 
|  | 223 | static void dmatest_callback(void *arg) | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 224 | { | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 225 | struct dmatest_done *done = arg; | 
|  | 226 |  | 
|  | 227 | done->done = true; | 
|  | 228 | wake_up_all(done->wait); | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 229 | } | 
|  | 230 |  | 
| Andy Shevchenko | 632fd28 | 2012-12-17 15:59:52 -0800 | [diff] [blame] | 231 | static inline void unmap_src(struct device *dev, dma_addr_t *addr, size_t len, | 
|  | 232 | unsigned int count) | 
|  | 233 | { | 
|  | 234 | while (count--) | 
|  | 235 | dma_unmap_single(dev, addr[count], len, DMA_TO_DEVICE); | 
|  | 236 | } | 
|  | 237 |  | 
|  | 238 | static inline void unmap_dst(struct device *dev, dma_addr_t *addr, size_t len, | 
|  | 239 | unsigned int count) | 
|  | 240 | { | 
|  | 241 | while (count--) | 
|  | 242 | dma_unmap_single(dev, addr[count], len, DMA_BIDIRECTIONAL); | 
|  | 243 | } | 
|  | 244 |  | 
/*
 * Smaller of @x and @y, rounded down to the nearest odd value.
 * NOTE(review): assumes min(x, y) >= 1, otherwise the unsigned
 * subtraction would wrap — callers guarantee this via "| 1".
 */
static unsigned int min_odd(unsigned int x, unsigned int y)
{
	unsigned int smaller = min(x, y);

	return (smaller & 1) ? smaller : smaller - 1;
}
|  | 251 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 252 | /* | 
|  | 253 | * This function repeatedly tests DMA transfers of various lengths and | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 254 | * offsets for a given operation type until it is told to exit by | 
|  | 255 | * kthread_stop(). There may be multiple threads running this function | 
|  | 256 | * in parallel for a single channel, and there may be multiple channels | 
|  | 257 | * being tested in parallel. | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 258 | * | 
|  | 259 | * Before each test, the source and destination buffer is initialized | 
|  | 260 | * with a known pattern. This pattern is different depending on | 
|  | 261 | * whether it's in an area which is supposed to be copied or | 
|  | 262 | * overwritten, and different in the source and destination buffers. | 
|  | 263 | * So if the DMA engine doesn't copy exactly what we tell it to copy, | 
|  | 264 | * we'll notice. | 
|  | 265 | */ | 
|  | 266 | static int dmatest_func(void *data) | 
|  | 267 | { | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 268 | DECLARE_WAIT_QUEUE_HEAD_ONSTACK(done_wait); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 269 | struct dmatest_thread	*thread = data; | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 270 | struct dmatest_done	done = { .wait = &done_wait }; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 271 | struct dma_chan		*chan; | 
| Akinobu Mita | 8be9e32b | 2012-10-28 00:49:32 +0900 | [diff] [blame] | 272 | struct dma_device	*dev; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 273 | const char		*thread_name; | 
|  | 274 | unsigned int		src_off, dst_off, len; | 
|  | 275 | unsigned int		error_count; | 
|  | 276 | unsigned int		failed_tests = 0; | 
|  | 277 | unsigned int		total_tests = 0; | 
|  | 278 | dma_cookie_t		cookie; | 
|  | 279 | enum dma_status		status; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 280 | enum dma_ctrl_flags 	flags; | 
| Anatolij Gustschin | 94de648 | 2010-02-15 22:35:23 +0100 | [diff] [blame] | 281 | u8			pq_coefs[pq_sources + 1]; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 282 | int			ret; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 283 | int			src_cnt; | 
|  | 284 | int			dst_cnt; | 
|  | 285 | int			i; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 286 |  | 
|  | 287 | thread_name = current->comm; | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 288 | set_freezable(); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 289 |  | 
|  | 290 | ret = -ENOMEM; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 291 |  | 
|  | 292 | smp_rmb(); | 
|  | 293 | chan = thread->chan; | 
| Akinobu Mita | 8be9e32b | 2012-10-28 00:49:32 +0900 | [diff] [blame] | 294 | dev = chan->device; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 295 | if (thread->type == DMA_MEMCPY) | 
|  | 296 | src_cnt = dst_cnt = 1; | 
|  | 297 | else if (thread->type == DMA_XOR) { | 
| Akinobu Mita | 8be9e32b | 2012-10-28 00:49:32 +0900 | [diff] [blame] | 298 | /* force odd to ensure dst = src */ | 
|  | 299 | src_cnt = min_odd(xor_sources | 1, dev->max_xor); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 300 | dst_cnt = 1; | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 301 | } else if (thread->type == DMA_PQ) { | 
| Akinobu Mita | 8be9e32b | 2012-10-28 00:49:32 +0900 | [diff] [blame] | 302 | /* force odd to ensure dst = src */ | 
|  | 303 | src_cnt = min_odd(pq_sources | 1, dma_maxpq(dev, 0)); | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 304 | dst_cnt = 2; | 
| Anatolij Gustschin | 94de648 | 2010-02-15 22:35:23 +0100 | [diff] [blame] | 305 | for (i = 0; i < src_cnt; i++) | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 306 | pq_coefs[i] = 1; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 307 | } else | 
|  | 308 | goto err_srcs; | 
|  | 309 |  | 
|  | 310 | thread->srcs = kcalloc(src_cnt+1, sizeof(u8 *), GFP_KERNEL); | 
|  | 311 | if (!thread->srcs) | 
|  | 312 | goto err_srcs; | 
|  | 313 | for (i = 0; i < src_cnt; i++) { | 
|  | 314 | thread->srcs[i] = kmalloc(test_buf_size, GFP_KERNEL); | 
|  | 315 | if (!thread->srcs[i]) | 
|  | 316 | goto err_srcbuf; | 
|  | 317 | } | 
|  | 318 | thread->srcs[i] = NULL; | 
|  | 319 |  | 
|  | 320 | thread->dsts = kcalloc(dst_cnt+1, sizeof(u8 *), GFP_KERNEL); | 
|  | 321 | if (!thread->dsts) | 
|  | 322 | goto err_dsts; | 
|  | 323 | for (i = 0; i < dst_cnt; i++) { | 
|  | 324 | thread->dsts[i] = kmalloc(test_buf_size, GFP_KERNEL); | 
|  | 325 | if (!thread->dsts[i]) | 
|  | 326 | goto err_dstbuf; | 
|  | 327 | } | 
|  | 328 | thread->dsts[i] = NULL; | 
|  | 329 |  | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 330 | set_user_nice(current, 10); | 
|  | 331 |  | 
| Ira Snyder | b203bd3 | 2011-03-03 07:54:53 +0000 | [diff] [blame] | 332 | /* | 
|  | 333 | * src buffers are freed by the DMAEngine code with dma_unmap_single() | 
|  | 334 | * dst buffers are freed by ourselves below | 
|  | 335 | */ | 
|  | 336 | flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT | 
|  | 337 | | DMA_COMPL_SKIP_DEST_UNMAP | DMA_COMPL_SRC_UNMAP_SINGLE; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 338 |  | 
| Nicolas Ferre | 0a2ff57d | 2009-07-03 19:26:51 +0200 | [diff] [blame] | 339 | while (!kthread_should_stop() | 
|  | 340 | && !(iterations && total_tests >= iterations)) { | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 341 | struct dma_async_tx_descriptor *tx = NULL; | 
|  | 342 | dma_addr_t dma_srcs[src_cnt]; | 
|  | 343 | dma_addr_t dma_dsts[dst_cnt]; | 
| Dan Williams | 83544ae | 2009-09-08 17:42:53 -0700 | [diff] [blame] | 344 | u8 align = 0; | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 345 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 346 | total_tests++; | 
|  | 347 |  | 
| Dan Williams | 83544ae | 2009-09-08 17:42:53 -0700 | [diff] [blame] | 348 | /* honor alignment restrictions */ | 
|  | 349 | if (thread->type == DMA_MEMCPY) | 
|  | 350 | align = dev->copy_align; | 
|  | 351 | else if (thread->type == DMA_XOR) | 
|  | 352 | align = dev->xor_align; | 
|  | 353 | else if (thread->type == DMA_PQ) | 
|  | 354 | align = dev->pq_align; | 
|  | 355 |  | 
| Guennadi Liakhovetski | cfe4f27 | 2009-12-04 19:44:48 +0100 | [diff] [blame] | 356 | if (1 << align > test_buf_size) { | 
|  | 357 | pr_err("%u-byte buffer too small for %d-byte alignment\n", | 
|  | 358 | test_buf_size, 1 << align); | 
|  | 359 | break; | 
|  | 360 | } | 
|  | 361 |  | 
|  | 362 | len = dmatest_random() % test_buf_size + 1; | 
| Dan Williams | 83544ae | 2009-09-08 17:42:53 -0700 | [diff] [blame] | 363 | len = (len >> align) << align; | 
| Guennadi Liakhovetski | cfe4f27 | 2009-12-04 19:44:48 +0100 | [diff] [blame] | 364 | if (!len) | 
|  | 365 | len = 1 << align; | 
|  | 366 | src_off = dmatest_random() % (test_buf_size - len + 1); | 
|  | 367 | dst_off = dmatest_random() % (test_buf_size - len + 1); | 
|  | 368 |  | 
| Dan Williams | 83544ae | 2009-09-08 17:42:53 -0700 | [diff] [blame] | 369 | src_off = (src_off >> align) << align; | 
|  | 370 | dst_off = (dst_off >> align) << align; | 
|  | 371 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 372 | dmatest_init_srcs(thread->srcs, src_off, len); | 
|  | 373 | dmatest_init_dsts(thread->dsts, dst_off, len); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 374 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 375 | for (i = 0; i < src_cnt; i++) { | 
|  | 376 | u8 *buf = thread->srcs[i] + src_off; | 
|  | 377 |  | 
|  | 378 | dma_srcs[i] = dma_map_single(dev->dev, buf, len, | 
|  | 379 | DMA_TO_DEVICE); | 
| Andy Shevchenko | afde3be | 2012-12-17 15:59:53 -0800 | [diff] [blame] | 380 | ret = dma_mapping_error(dev->dev, dma_srcs[i]); | 
|  | 381 | if (ret) { | 
|  | 382 | unmap_src(dev->dev, dma_srcs, len, i); | 
|  | 383 | pr_warn("%s: #%u: mapping error %d with " | 
|  | 384 | "src_off=0x%x len=0x%x\n", | 
|  | 385 | thread_name, total_tests - 1, ret, | 
|  | 386 | src_off, len); | 
|  | 387 | failed_tests++; | 
|  | 388 | continue; | 
|  | 389 | } | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 390 | } | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 391 | /* map with DMA_BIDIRECTIONAL to force writeback/invalidate */ | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 392 | for (i = 0; i < dst_cnt; i++) { | 
|  | 393 | dma_dsts[i] = dma_map_single(dev->dev, thread->dsts[i], | 
|  | 394 | test_buf_size, | 
|  | 395 | DMA_BIDIRECTIONAL); | 
| Andy Shevchenko | afde3be | 2012-12-17 15:59:53 -0800 | [diff] [blame] | 396 | ret = dma_mapping_error(dev->dev, dma_dsts[i]); | 
|  | 397 | if (ret) { | 
|  | 398 | unmap_src(dev->dev, dma_srcs, len, src_cnt); | 
|  | 399 | unmap_dst(dev->dev, dma_dsts, test_buf_size, i); | 
|  | 400 | pr_warn("%s: #%u: mapping error %d with " | 
|  | 401 | "dst_off=0x%x len=0x%x\n", | 
|  | 402 | thread_name, total_tests - 1, ret, | 
|  | 403 | dst_off, test_buf_size); | 
|  | 404 | failed_tests++; | 
|  | 405 | continue; | 
|  | 406 | } | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 407 | } | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 408 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 409 | if (thread->type == DMA_MEMCPY) | 
|  | 410 | tx = dev->device_prep_dma_memcpy(chan, | 
|  | 411 | dma_dsts[0] + dst_off, | 
|  | 412 | dma_srcs[0], len, | 
|  | 413 | flags); | 
|  | 414 | else if (thread->type == DMA_XOR) | 
|  | 415 | tx = dev->device_prep_dma_xor(chan, | 
|  | 416 | dma_dsts[0] + dst_off, | 
| Dan Williams | 67b9124 | 2010-02-28 22:20:18 -0700 | [diff] [blame] | 417 | dma_srcs, src_cnt, | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 418 | len, flags); | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 419 | else if (thread->type == DMA_PQ) { | 
|  | 420 | dma_addr_t dma_pq[dst_cnt]; | 
|  | 421 |  | 
|  | 422 | for (i = 0; i < dst_cnt; i++) | 
|  | 423 | dma_pq[i] = dma_dsts[i] + dst_off; | 
|  | 424 | tx = dev->device_prep_dma_pq(chan, dma_pq, dma_srcs, | 
| Anatolij Gustschin | 94de648 | 2010-02-15 22:35:23 +0100 | [diff] [blame] | 425 | src_cnt, pq_coefs, | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 426 | len, flags); | 
|  | 427 | } | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 428 |  | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 429 | if (!tx) { | 
| Andy Shevchenko | 632fd28 | 2012-12-17 15:59:52 -0800 | [diff] [blame] | 430 | unmap_src(dev->dev, dma_srcs, len, src_cnt); | 
|  | 431 | unmap_dst(dev->dev, dma_dsts, test_buf_size, dst_cnt); | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 432 | pr_warning("%s: #%u: prep error with src_off=0x%x " | 
|  | 433 | "dst_off=0x%x len=0x%x\n", | 
|  | 434 | thread_name, total_tests - 1, | 
|  | 435 | src_off, dst_off, len); | 
|  | 436 | msleep(100); | 
|  | 437 | failed_tests++; | 
|  | 438 | continue; | 
|  | 439 | } | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 440 |  | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 441 | done.done = false; | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 442 | tx->callback = dmatest_callback; | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 443 | tx->callback_param = &done; | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 444 | cookie = tx->tx_submit(tx); | 
|  | 445 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 446 | if (dma_submit_error(cookie)) { | 
|  | 447 | pr_warning("%s: #%u: submit error %d with src_off=0x%x " | 
|  | 448 | "dst_off=0x%x len=0x%x\n", | 
|  | 449 | thread_name, total_tests - 1, cookie, | 
|  | 450 | src_off, dst_off, len); | 
|  | 451 | msleep(100); | 
|  | 452 | failed_tests++; | 
|  | 453 | continue; | 
|  | 454 | } | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 455 | dma_async_issue_pending(chan); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 456 |  | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 457 | wait_event_freezable_timeout(done_wait, done.done, | 
|  | 458 | msecs_to_jiffies(timeout)); | 
| Guennadi Liakhovetski | 981ed70 | 2011-08-18 16:50:51 +0200 | [diff] [blame] | 459 |  | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 460 | status = dma_async_is_tx_complete(chan, cookie, NULL, NULL); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 461 |  | 
| Tejun Heo | adfa543 | 2011-11-23 09:28:16 -0800 | [diff] [blame] | 462 | if (!done.done) { | 
|  | 463 | /* | 
|  | 464 | * We're leaving the timed out dma operation with | 
|  | 465 | * dangling pointer to done_wait.  To make this | 
|  | 466 | * correct, we'll need to allocate wait_done for | 
|  | 467 | * each test iteration and perform "who's gonna | 
|  | 468 | * free it this time?" dancing.  For now, just | 
|  | 469 | * leave it dangling. | 
|  | 470 | */ | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 471 | pr_warning("%s: #%u: test timed out\n", | 
|  | 472 | thread_name, total_tests - 1); | 
|  | 473 | failed_tests++; | 
|  | 474 | continue; | 
|  | 475 | } else if (status != DMA_SUCCESS) { | 
|  | 476 | pr_warning("%s: #%u: got completion callback," | 
|  | 477 | " but status is \'%s\'\n", | 
|  | 478 | thread_name, total_tests - 1, | 
|  | 479 | status == DMA_ERROR ? "error" : "in progress"); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 480 | failed_tests++; | 
|  | 481 | continue; | 
|  | 482 | } | 
| Dan Williams | e44e0aa | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 483 |  | 
| Atsushi Nemoto | d86be86 | 2009-01-13 09:22:20 -0700 | [diff] [blame] | 484 | /* Unmap by myself (see DMA_COMPL_SKIP_DEST_UNMAP above) */ | 
| Andy Shevchenko | 632fd28 | 2012-12-17 15:59:52 -0800 | [diff] [blame] | 485 | unmap_dst(dev->dev, dma_dsts, test_buf_size, dst_cnt); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 486 |  | 
|  | 487 | error_count = 0; | 
|  | 488 |  | 
|  | 489 | pr_debug("%s: verifying source buffer...\n", thread_name); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 490 | error_count += dmatest_verify(thread->srcs, 0, src_off, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 491 | 0, PATTERN_SRC, true); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 492 | error_count += dmatest_verify(thread->srcs, src_off, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 493 | src_off + len, src_off, | 
|  | 494 | PATTERN_SRC | PATTERN_COPY, true); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 495 | error_count += dmatest_verify(thread->srcs, src_off + len, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 496 | test_buf_size, src_off + len, | 
|  | 497 | PATTERN_SRC, true); | 
|  | 498 |  | 
|  | 499 | pr_debug("%s: verifying dest buffer...\n", | 
|  | 500 | thread->task->comm); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 501 | error_count += dmatest_verify(thread->dsts, 0, dst_off, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 502 | 0, PATTERN_DST, false); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 503 | error_count += dmatest_verify(thread->dsts, dst_off, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 504 | dst_off + len, src_off, | 
|  | 505 | PATTERN_SRC | PATTERN_COPY, false); | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 506 | error_count += dmatest_verify(thread->dsts, dst_off + len, | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 507 | test_buf_size, dst_off + len, | 
|  | 508 | PATTERN_DST, false); | 
|  | 509 |  | 
|  | 510 | if (error_count) { | 
|  | 511 | pr_warning("%s: #%u: %u errors with " | 
|  | 512 | "src_off=0x%x dst_off=0x%x len=0x%x\n", | 
|  | 513 | thread_name, total_tests - 1, error_count, | 
|  | 514 | src_off, dst_off, len); | 
|  | 515 | failed_tests++; | 
|  | 516 | } else { | 
|  | 517 | pr_debug("%s: #%u: No errors with " | 
|  | 518 | "src_off=0x%x dst_off=0x%x len=0x%x\n", | 
|  | 519 | thread_name, total_tests - 1, | 
|  | 520 | src_off, dst_off, len); | 
|  | 521 | } | 
|  | 522 | } | 
|  | 523 |  | 
|  | 524 | ret = 0; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 525 | for (i = 0; thread->dsts[i]; i++) | 
|  | 526 | kfree(thread->dsts[i]); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 527 | err_dstbuf: | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 528 | kfree(thread->dsts); | 
|  | 529 | err_dsts: | 
|  | 530 | for (i = 0; thread->srcs[i]; i++) | 
|  | 531 | kfree(thread->srcs[i]); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 532 | err_srcbuf: | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 533 | kfree(thread->srcs); | 
|  | 534 | err_srcs: | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 535 | pr_notice("%s: terminating after %u tests, %u failures (status %d)\n", | 
|  | 536 | thread_name, total_tests, failed_tests, ret); | 
| Nicolas Ferre | 0a2ff57d | 2009-07-03 19:26:51 +0200 | [diff] [blame] | 537 |  | 
| Viresh Kumar | 9704efa | 2011-07-29 16:21:57 +0530 | [diff] [blame] | 538 | /* terminate all transfers on specified channels */ | 
| Shiraz Hashim | 5e034f7 | 2012-11-09 15:26:29 +0000 | [diff] [blame] | 539 | if (ret) | 
|  | 540 | dmaengine_terminate_all(chan); | 
|  | 541 |  | 
| Nicolas Ferre | 0a2ff57d | 2009-07-03 19:26:51 +0200 | [diff] [blame] | 542 | if (iterations > 0) | 
|  | 543 | while (!kthread_should_stop()) { | 
| Yong Zhang | b953df7 | 2010-02-05 21:52:37 +0800 | [diff] [blame] | 544 | DECLARE_WAIT_QUEUE_HEAD_ONSTACK(wait_dmatest_exit); | 
| Nicolas Ferre | 0a2ff57d | 2009-07-03 19:26:51 +0200 | [diff] [blame] | 545 | interruptible_sleep_on(&wait_dmatest_exit); | 
|  | 546 | } | 
|  | 547 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 548 | return ret; | 
|  | 549 | } | 
|  | 550 |  | 
|  | 551 | static void dmatest_cleanup_channel(struct dmatest_chan *dtc) | 
|  | 552 | { | 
|  | 553 | struct dmatest_thread	*thread; | 
|  | 554 | struct dmatest_thread	*_thread; | 
|  | 555 | int			ret; | 
|  | 556 |  | 
|  | 557 | list_for_each_entry_safe(thread, _thread, &dtc->threads, node) { | 
|  | 558 | ret = kthread_stop(thread->task); | 
|  | 559 | pr_debug("dmatest: thread %s exited with status %d\n", | 
|  | 560 | thread->task->comm, ret); | 
|  | 561 | list_del(&thread->node); | 
|  | 562 | kfree(thread); | 
|  | 563 | } | 
| Viresh Kumar | 9704efa | 2011-07-29 16:21:57 +0530 | [diff] [blame] | 564 |  | 
|  | 565 | /* terminate all transfers on specified channels */ | 
| Jon Mason | 944ea4d | 2012-11-11 23:03:20 +0000 | [diff] [blame] | 566 | dmaengine_terminate_all(dtc->chan); | 
| Viresh Kumar | 9704efa | 2011-07-29 16:21:57 +0530 | [diff] [blame] | 567 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 568 | kfree(dtc); | 
|  | 569 | } | 
|  | 570 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 571 | static int dmatest_add_threads(struct dmatest_chan *dtc, enum dma_transaction_type type) | 
|  | 572 | { | 
|  | 573 | struct dmatest_thread *thread; | 
|  | 574 | struct dma_chan *chan = dtc->chan; | 
|  | 575 | char *op; | 
|  | 576 | unsigned int i; | 
|  | 577 |  | 
|  | 578 | if (type == DMA_MEMCPY) | 
|  | 579 | op = "copy"; | 
|  | 580 | else if (type == DMA_XOR) | 
|  | 581 | op = "xor"; | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 582 | else if (type == DMA_PQ) | 
|  | 583 | op = "pq"; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 584 | else | 
|  | 585 | return -EINVAL; | 
|  | 586 |  | 
|  | 587 | for (i = 0; i < threads_per_chan; i++) { | 
|  | 588 | thread = kzalloc(sizeof(struct dmatest_thread), GFP_KERNEL); | 
|  | 589 | if (!thread) { | 
|  | 590 | pr_warning("dmatest: No memory for %s-%s%u\n", | 
|  | 591 | dma_chan_name(chan), op, i); | 
|  | 592 |  | 
|  | 593 | break; | 
|  | 594 | } | 
|  | 595 | thread->chan = dtc->chan; | 
|  | 596 | thread->type = type; | 
|  | 597 | smp_wmb(); | 
|  | 598 | thread->task = kthread_run(dmatest_func, thread, "%s-%s%u", | 
|  | 599 | dma_chan_name(chan), op, i); | 
|  | 600 | if (IS_ERR(thread->task)) { | 
|  | 601 | pr_warning("dmatest: Failed to run thread %s-%s%u\n", | 
|  | 602 | dma_chan_name(chan), op, i); | 
|  | 603 | kfree(thread); | 
|  | 604 | break; | 
|  | 605 | } | 
|  | 606 |  | 
|  | 607 | /* srcbuf and dstbuf are allocated by the thread itself */ | 
|  | 608 |  | 
|  | 609 | list_add_tail(&thread->node, &dtc->threads); | 
|  | 610 | } | 
|  | 611 |  | 
|  | 612 | return i; | 
|  | 613 | } | 
|  | 614 |  | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 615 | static int dmatest_add_channel(struct dma_chan *chan) | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 616 | { | 
|  | 617 | struct dmatest_chan	*dtc; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 618 | struct dma_device	*dma_dev = chan->device; | 
|  | 619 | unsigned int		thread_count = 0; | 
| Kulikov Vasiliy | b9033e6 | 2010-07-17 19:19:48 +0400 | [diff] [blame] | 620 | int cnt; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 621 |  | 
| Andrew Morton | 6fdb8bd | 2008-09-19 04:16:23 -0700 | [diff] [blame] | 622 | dtc = kmalloc(sizeof(struct dmatest_chan), GFP_KERNEL); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 623 | if (!dtc) { | 
| Dan Williams | 41d5e59 | 2009-01-06 11:38:21 -0700 | [diff] [blame] | 624 | pr_warning("dmatest: No memory for %s\n", dma_chan_name(chan)); | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 625 | return -ENOMEM; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 626 | } | 
|  | 627 |  | 
|  | 628 | dtc->chan = chan; | 
|  | 629 | INIT_LIST_HEAD(&dtc->threads); | 
|  | 630 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 631 | if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) { | 
|  | 632 | cnt = dmatest_add_threads(dtc, DMA_MEMCPY); | 
| Nicolas Ferre | f1aef8b | 2009-07-06 18:19:44 +0200 | [diff] [blame] | 633 | thread_count += cnt > 0 ? cnt : 0; | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 634 | } | 
|  | 635 | if (dma_has_cap(DMA_XOR, dma_dev->cap_mask)) { | 
|  | 636 | cnt = dmatest_add_threads(dtc, DMA_XOR); | 
| Nicolas Ferre | f1aef8b | 2009-07-06 18:19:44 +0200 | [diff] [blame] | 637 | thread_count += cnt > 0 ? cnt : 0; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 638 | } | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 639 | if (dma_has_cap(DMA_PQ, dma_dev->cap_mask)) { | 
|  | 640 | cnt = dmatest_add_threads(dtc, DMA_PQ); | 
| Dr. David Alan Gilbert | d07a74a | 2011-08-25 16:13:55 -0700 | [diff] [blame] | 641 | thread_count += cnt > 0 ? cnt : 0; | 
| Dan Williams | 58691d6 | 2009-08-29 19:09:27 -0700 | [diff] [blame] | 642 | } | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 643 |  | 
| Dan Williams | b54d5cb | 2009-03-25 09:13:25 -0700 | [diff] [blame] | 644 | pr_info("dmatest: Started %u threads using %s\n", | 
|  | 645 | thread_count, dma_chan_name(chan)); | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 646 |  | 
|  | 647 | list_add_tail(&dtc->node, &dmatest_channels); | 
|  | 648 | nr_channels++; | 
|  | 649 |  | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 650 | return 0; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 651 | } | 
|  | 652 |  | 
| Dan Williams | 7dd6025 | 2009-01-06 11:38:19 -0700 | [diff] [blame] | 653 | static bool filter(struct dma_chan *chan, void *param) | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 654 | { | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 655 | if (!dmatest_match_channel(chan) || !dmatest_match_device(chan->device)) | 
| Dan Williams | 7dd6025 | 2009-01-06 11:38:19 -0700 | [diff] [blame] | 656 | return false; | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 657 | else | 
| Dan Williams | 7dd6025 | 2009-01-06 11:38:19 -0700 | [diff] [blame] | 658 | return true; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 659 | } | 
|  | 660 |  | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 661 | static int __init dmatest_init(void) | 
|  | 662 | { | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 663 | dma_cap_mask_t mask; | 
|  | 664 | struct dma_chan *chan; | 
|  | 665 | int err = 0; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 666 |  | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 667 | dma_cap_zero(mask); | 
|  | 668 | dma_cap_set(DMA_MEMCPY, mask); | 
|  | 669 | for (;;) { | 
|  | 670 | chan = dma_request_channel(mask, filter, NULL); | 
|  | 671 | if (chan) { | 
|  | 672 | err = dmatest_add_channel(chan); | 
| Dan Williams | c56c81a | 2009-04-08 15:08:23 -0700 | [diff] [blame] | 673 | if (err) { | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 674 | dma_release_channel(chan); | 
|  | 675 | break; /* add_channel failed, punt */ | 
|  | 676 | } | 
|  | 677 | } else | 
|  | 678 | break; /* no more channels available */ | 
|  | 679 | if (max_channels && nr_channels >= max_channels) | 
|  | 680 | break; /* we have all we need */ | 
|  | 681 | } | 
|  | 682 |  | 
|  | 683 | return err; | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 684 | } | 
/*
 * When compiled-in, wait for drivers to load first: late_initcall() runs
 * after normal device/driver initcalls, so DMA providers have had a chance
 * to register their channels before we try to claim them.
 */
late_initcall(dmatest_init);
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 687 |  | 
|  | 688 | static void __exit dmatest_exit(void) | 
|  | 689 | { | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 690 | struct dmatest_chan *dtc, *_dtc; | 
| Dan Williams | 7cbd487 | 2009-03-04 16:06:03 -0700 | [diff] [blame] | 691 | struct dma_chan *chan; | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 692 |  | 
|  | 693 | list_for_each_entry_safe(dtc, _dtc, &dmatest_channels, node) { | 
|  | 694 | list_del(&dtc->node); | 
| Dan Williams | 7cbd487 | 2009-03-04 16:06:03 -0700 | [diff] [blame] | 695 | chan = dtc->chan; | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 696 | dmatest_cleanup_channel(dtc); | 
|  | 697 | pr_debug("dmatest: dropped channel %s\n", | 
| Dan Williams | 7cbd487 | 2009-03-04 16:06:03 -0700 | [diff] [blame] | 698 | dma_chan_name(chan)); | 
|  | 699 | dma_release_channel(chan); | 
| Dan Williams | 33df8ca | 2009-01-06 11:38:15 -0700 | [diff] [blame] | 700 | } | 
| Haavard Skinnemoen | 4a776f0 | 2008-07-08 11:58:45 -0700 | [diff] [blame] | 701 | } | 
module_exit(dmatest_exit);

/* Module metadata */
MODULE_AUTHOR("Haavard Skinnemoen (Atmel)");
MODULE_LICENSE("GPL v2");