blob: 71d58ddea9c1a375efb0366c8fab3d1f1850cd72 [file] [log] [blame]
/* linux/arch/arm/plat-samsung/dma-ops.c
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * Samsung DMA Operations
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
12
13#include <linux/kernel.h>
14#include <linux/errno.h>
15#include <linux/amba/pl330.h>
16#include <linux/scatterlist.h>
Paul Gortmaker0c073e32011-10-08 23:24:48 -040017#include <linux/export.h>
Boojin Kimc4e16622011-09-02 09:44:35 +090018
19#include <mach/dma.h>
20
Boojin Kimc4e16622011-09-02 09:44:35 +090021static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
Padmavathi Vennae7ba5f12013-01-18 17:17:02 +053022 struct samsung_dma_req *param,
23 struct device *dev, char *ch_name)
Boojin Kimc4e16622011-09-02 09:44:35 +090024{
Boojin Kimc4e16622011-09-02 09:44:35 +090025 dma_cap_mask_t mask;
Thomas Abraham4972a802011-10-24 11:43:38 +020026 void *filter_param;
Boojin Kimc4e16622011-09-02 09:44:35 +090027
28 dma_cap_zero(mask);
Boojin Kimfbb20e82012-06-19 13:26:53 +090029 dma_cap_set(param->cap, mask);
Boojin Kimc4e16622011-09-02 09:44:35 +090030
Thomas Abraham4972a802011-10-24 11:43:38 +020031 /*
32 * If a dma channel property of a device node from device tree is
33 * specified, use that as the fliter parameter.
34 */
Boojin Kimfbb20e82012-06-19 13:26:53 +090035 filter_param = (dma_ch == DMACH_DT_PROP) ?
36 (void *)param->dt_dmach_prop : (void *)dma_ch;
Padmavathi Vennae7ba5f12013-01-18 17:17:02 +053037
38 if (dev->of_node)
39 return (unsigned)dma_request_slave_channel(dev, ch_name);
40 else
41 return (unsigned)dma_request_channel(mask, pl330_filter,
42 filter_param);
Boojin Kimc4e16622011-09-02 09:44:35 +090043}
44
/*
 * samsung_dmadev_release - give back a channel obtained from
 * samsung_dmadev_request(); @param is unused, kept for the ops signature.
 */
static int samsung_dmadev_release(unsigned ch, void *param)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_release_channel(chan);

	return 0;
}
51
Boojin Kimfbb20e82012-06-19 13:26:53 +090052static int samsung_dmadev_config(unsigned ch,
53 struct samsung_dma_config *param)
54{
55 struct dma_chan *chan = (struct dma_chan *)ch;
56 struct dma_slave_config slave_config;
57
58 if (param->direction == DMA_DEV_TO_MEM) {
59 memset(&slave_config, 0, sizeof(struct dma_slave_config));
60 slave_config.direction = param->direction;
61 slave_config.src_addr = param->fifo;
62 slave_config.src_addr_width = param->width;
63 slave_config.src_maxburst = 1;
64 dmaengine_slave_config(chan, &slave_config);
65 } else if (param->direction == DMA_MEM_TO_DEV) {
66 memset(&slave_config, 0, sizeof(struct dma_slave_config));
67 slave_config.direction = param->direction;
68 slave_config.dst_addr = param->fifo;
69 slave_config.dst_addr_width = param->width;
70 slave_config.dst_maxburst = 1;
71 dmaengine_slave_config(chan, &slave_config);
72 } else {
73 pr_warn("unsupported direction\n");
74 return -EINVAL;
75 }
76
77 return 0;
78}
79
Boojin Kimc4e16622011-09-02 09:44:35 +090080static int samsung_dmadev_prepare(unsigned ch,
Boojin Kimfbb20e82012-06-19 13:26:53 +090081 struct samsung_dma_prep *param)
Boojin Kimc4e16622011-09-02 09:44:35 +090082{
83 struct scatterlist sg;
84 struct dma_chan *chan = (struct dma_chan *)ch;
85 struct dma_async_tx_descriptor *desc;
86
Boojin Kimfbb20e82012-06-19 13:26:53 +090087 switch (param->cap) {
Boojin Kimc4e16622011-09-02 09:44:35 +090088 case DMA_SLAVE:
89 sg_init_table(&sg, 1);
Boojin Kimfbb20e82012-06-19 13:26:53 +090090 sg_dma_len(&sg) = param->len;
91 sg_set_page(&sg, pfn_to_page(PFN_DOWN(param->buf)),
92 param->len, offset_in_page(param->buf));
93 sg_dma_address(&sg) = param->buf;
Boojin Kimc4e16622011-09-02 09:44:35 +090094
Alexandre Bounine16052822012-03-08 16:11:18 -050095 desc = dmaengine_prep_slave_sg(chan,
Boojin Kimfbb20e82012-06-19 13:26:53 +090096 &sg, 1, param->direction, DMA_PREP_INTERRUPT);
Boojin Kimc4e16622011-09-02 09:44:35 +090097 break;
98 case DMA_CYCLIC:
Boojin Kimfbb20e82012-06-19 13:26:53 +090099 desc = dmaengine_prep_dma_cyclic(chan, param->buf,
Peter Ujfalusib7ef37d2012-09-24 10:58:05 +0300100 param->len, param->period, param->direction,
101 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
Boojin Kimc4e16622011-09-02 09:44:35 +0900102 break;
103 default:
104 dev_err(&chan->dev->device, "unsupported format\n");
105 return -EFAULT;
106 }
107
108 if (!desc) {
109 dev_err(&chan->dev->device, "cannot prepare cyclic dma\n");
110 return -EFAULT;
111 }
112
Boojin Kimfbb20e82012-06-19 13:26:53 +0900113 desc->callback = param->fp;
114 desc->callback_param = param->fp_param;
Boojin Kimc4e16622011-09-02 09:44:35 +0900115
116 dmaengine_submit((struct dma_async_tx_descriptor *)desc);
117
118 return 0;
119}
120
/* Kick the channel: start executing all previously submitted descriptors. */
static inline int samsung_dmadev_trigger(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_async_issue_pending(chan);

	return 0;
}
127
/* Abort every outstanding transfer on the channel (also serves as .stop). */
static inline int samsung_dmadev_flush(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	return dmaengine_terminate_all(chan);
}
132
/*
 * Operation table handed to Samsung platform drivers: maps the legacy
 * samsung_dma_ops interface onto the generic dmaengine API.  .started has
 * no dmaengine equivalent; .stop reuses the flush (terminate-all) handler.
 */
static struct samsung_dma_ops dmadev_ops = {
	.request	= samsung_dmadev_request,
	.release	= samsung_dmadev_release,
	.config		= samsung_dmadev_config,
	.prepare	= samsung_dmadev_prepare,
	.trigger	= samsung_dmadev_trigger,
	.started	= NULL,
	.flush		= samsung_dmadev_flush,
	.stop		= samsung_dmadev_flush,
};
143
/*
 * samsung_dmadev_get_ops - return the dmaengine-backed operation table.
 * Exported so platform/driver code can select this backend at runtime.
 */
void *samsung_dmadev_get_ops(void)
{
	return &dmadev_ops;
}
EXPORT_SYMBOL(samsung_dmadev_get_ops);