Skip to content

Commit 1a9e7a0

Browse files
kedareswararao
authored and
Vinod Koul
committed
dmaengine: vdma: Add support for multi-channel dma mode
This patch adds support for AXI DMA multi-channel dma mode. Multichannel mode enables DMA to connect to multiple masters and slaves on the streaming side. In multichannel mode, AXI DMA supports 2D transfers. Signed-off-by: Kedareswara rao Appana <[email protected]> Signed-off-by: Vinod Koul <[email protected]>
1 parent ba2c194 commit 1a9e7a0

File tree

1 file changed

+190
-23
lines changed

1 file changed

+190
-23
lines changed

drivers/dma/xilinx/xilinx_vdma.c

Lines changed: 190 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@
114114
#define XILINX_VDMA_REG_START_ADDRESS_64(n) (0x000c + 8 * (n))
115115

116116
/* HW specific definitions */
117-
#define XILINX_DMA_MAX_CHANS_PER_DEVICE 0x2
117+
#define XILINX_DMA_MAX_CHANS_PER_DEVICE 0x20
118118

119119
#define XILINX_DMA_DMAXR_ALL_IRQ_MASK \
120120
(XILINX_DMA_DMASR_FRM_CNT_IRQ | \
@@ -165,6 +165,18 @@
165165
#define XILINX_DMA_COALESCE_MAX 255
166166
#define XILINX_DMA_NUM_APP_WORDS 5
167167

168+
/* Multi-Channel DMA Descriptor offsets*/
169+
#define XILINX_DMA_MCRX_CDESC(x) (0x40 + (x-1) * 0x20)
170+
#define XILINX_DMA_MCRX_TDESC(x) (0x48 + (x-1) * 0x20)
171+
172+
/* Multi-Channel DMA Masks/Shifts */
173+
#define XILINX_DMA_BD_HSIZE_MASK GENMASK(15, 0)
174+
#define XILINX_DMA_BD_STRIDE_MASK GENMASK(15, 0)
175+
#define XILINX_DMA_BD_VSIZE_MASK GENMASK(31, 19)
176+
#define XILINX_DMA_BD_TDEST_MASK GENMASK(4, 0)
177+
#define XILINX_DMA_BD_STRIDE_SHIFT 0
178+
#define XILINX_DMA_BD_VSIZE_SHIFT 19
179+
168180
/* AXI CDMA Specific Registers/Offsets */
169181
#define XILINX_CDMA_REG_SRCADDR 0x18
170182
#define XILINX_CDMA_REG_DSTADDR 0x20
@@ -210,8 +222,8 @@ struct xilinx_axidma_desc_hw {
210222
u32 next_desc_msb;
211223
u32 buf_addr;
212224
u32 buf_addr_msb;
213-
u32 pad1;
214-
u32 pad2;
225+
u32 mcdma_control;
226+
u32 vsize_stride;
215227
u32 control;
216228
u32 status;
217229
u32 app[XILINX_DMA_NUM_APP_WORDS];
@@ -349,6 +361,7 @@ struct xilinx_dma_chan {
349361
struct xilinx_axidma_tx_segment *seg_v;
350362
struct xilinx_axidma_tx_segment *cyclic_seg_v;
351363
void (*start_transfer)(struct xilinx_dma_chan *chan);
364+
u16 tdest;
352365
};
353366

354367
struct xilinx_dma_config {
@@ -365,6 +378,7 @@ struct xilinx_dma_config {
365378
* @common: DMA device structure
366379
* @chan: Driver specific DMA channel
367380
* @has_sg: Specifies whether Scatter-Gather is present or not
381+
* @mcdma: Specifies whether Multi-Channel is present or not
368382
* @flush_on_fsync: Flush on frame sync
369383
* @ext_addr: Indicates 64 bit addressing is supported by dma device
370384
* @pdev: Platform device structure pointer
@@ -374,13 +388,16 @@ struct xilinx_dma_config {
374388
* @txs_clk: DMA mm2s stream clock
375389
* @rx_clk: DMA s2mm clock
376390
* @rxs_clk: DMA s2mm stream clock
391+
* @nr_channels: Number of channels DMA device supports
392+
* @chan_id: DMA channel identifier
377393
*/
378394
struct xilinx_dma_device {
379395
void __iomem *regs;
380396
struct device *dev;
381397
struct dma_device common;
382398
struct xilinx_dma_chan *chan[XILINX_DMA_MAX_CHANS_PER_DEVICE];
383399
bool has_sg;
400+
bool mcdma;
384401
u32 flush_on_fsync;
385402
bool ext_addr;
386403
struct platform_device *pdev;
@@ -390,6 +407,8 @@ struct xilinx_dma_device {
390407
struct clk *txs_clk;
391408
struct clk *rx_clk;
392409
struct clk *rxs_clk;
410+
u32 nr_channels;
411+
u32 chan_id;
393412
};
394413

395414
/* Macros */
@@ -1196,18 +1215,20 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
11961215
tail_segment = list_last_entry(&tail_desc->segments,
11971216
struct xilinx_axidma_tx_segment, node);
11981217

1199-
old_head = list_first_entry(&head_desc->segments,
1200-
struct xilinx_axidma_tx_segment, node);
1201-
new_head = chan->seg_v;
1202-
/* Copy Buffer Descriptor fields. */
1203-
new_head->hw = old_head->hw;
1218+
if (chan->has_sg && !chan->xdev->mcdma) {
1219+
old_head = list_first_entry(&head_desc->segments,
1220+
struct xilinx_axidma_tx_segment, node);
1221+
new_head = chan->seg_v;
1222+
/* Copy Buffer Descriptor fields. */
1223+
new_head->hw = old_head->hw;
12041224

1205-
/* Swap and save new reserve */
1206-
list_replace_init(&old_head->node, &new_head->node);
1207-
chan->seg_v = old_head;
1225+
/* Swap and save new reserve */
1226+
list_replace_init(&old_head->node, &new_head->node);
1227+
chan->seg_v = old_head;
12081228

1209-
tail_segment->hw.next_desc = chan->seg_v->phys;
1210-
head_desc->async_tx.phys = new_head->phys;
1229+
tail_segment->hw.next_desc = chan->seg_v->phys;
1230+
head_desc->async_tx.phys = new_head->phys;
1231+
}
12111232

12121233
reg = dma_ctrl_read(chan, XILINX_DMA_REG_DMACR);
12131234

@@ -1218,23 +1239,53 @@ static void xilinx_dma_start_transfer(struct xilinx_dma_chan *chan)
12181239
dma_ctrl_write(chan, XILINX_DMA_REG_DMACR, reg);
12191240
}
12201241

1221-
if (chan->has_sg)
1242+
if (chan->has_sg && !chan->xdev->mcdma)
12221243
xilinx_write(chan, XILINX_DMA_REG_CURDESC,
12231244
head_desc->async_tx.phys);
12241245

1246+
if (chan->has_sg && chan->xdev->mcdma) {
1247+
if (chan->direction == DMA_MEM_TO_DEV) {
1248+
dma_ctrl_write(chan, XILINX_DMA_REG_CURDESC,
1249+
head_desc->async_tx.phys);
1250+
} else {
1251+
if (!chan->tdest) {
1252+
dma_ctrl_write(chan, XILINX_DMA_REG_CURDESC,
1253+
head_desc->async_tx.phys);
1254+
} else {
1255+
dma_ctrl_write(chan,
1256+
XILINX_DMA_MCRX_CDESC(chan->tdest),
1257+
head_desc->async_tx.phys);
1258+
}
1259+
}
1260+
}
1261+
12251262
xilinx_dma_start(chan);
12261263

12271264
if (chan->err)
12281265
return;
12291266

12301267
/* Start the transfer */
1231-
if (chan->has_sg) {
1268+
if (chan->has_sg && !chan->xdev->mcdma) {
12321269
if (chan->cyclic)
12331270
xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
12341271
chan->cyclic_seg_v->phys);
12351272
else
12361273
xilinx_write(chan, XILINX_DMA_REG_TAILDESC,
12371274
tail_segment->phys);
1275+
} else if (chan->has_sg && chan->xdev->mcdma) {
1276+
if (chan->direction == DMA_MEM_TO_DEV) {
1277+
dma_ctrl_write(chan, XILINX_DMA_REG_TAILDESC,
1278+
tail_segment->phys);
1279+
} else {
1280+
if (!chan->tdest) {
1281+
dma_ctrl_write(chan, XILINX_DMA_REG_TAILDESC,
1282+
tail_segment->phys);
1283+
} else {
1284+
dma_ctrl_write(chan,
1285+
XILINX_DMA_MCRX_TDESC(chan->tdest),
1286+
tail_segment->phys);
1287+
}
1288+
}
12381289
} else {
12391290
struct xilinx_axidma_tx_segment *segment;
12401291
struct xilinx_axidma_desc_hw *hw;
@@ -1861,6 +1912,90 @@ static struct dma_async_tx_descriptor *xilinx_dma_prep_dma_cyclic(
18611912
return NULL;
18621913
}
18631914

1915+
/**
1916+
* xilinx_dma_prep_interleaved - prepare a descriptor for a
1917+
* DMA_SLAVE transaction
1918+
* @dchan: DMA channel
1919+
* @xt: Interleaved template pointer
1920+
* @flags: transfer ack flags
1921+
*
1922+
* Return: Async transaction descriptor on success and NULL on failure
1923+
*/
1924+
static struct dma_async_tx_descriptor *
1925+
xilinx_dma_prep_interleaved(struct dma_chan *dchan,
1926+
struct dma_interleaved_template *xt,
1927+
unsigned long flags)
1928+
{
1929+
struct xilinx_dma_chan *chan = to_xilinx_chan(dchan);
1930+
struct xilinx_dma_tx_descriptor *desc;
1931+
struct xilinx_axidma_tx_segment *segment;
1932+
struct xilinx_axidma_desc_hw *hw;
1933+
1934+
if (!is_slave_direction(xt->dir))
1935+
return NULL;
1936+
1937+
if (!xt->numf || !xt->sgl[0].size)
1938+
return NULL;
1939+
1940+
if (xt->frame_size != 1)
1941+
return NULL;
1942+
1943+
/* Allocate a transaction descriptor. */
1944+
desc = xilinx_dma_alloc_tx_descriptor(chan);
1945+
if (!desc)
1946+
return NULL;
1947+
1948+
chan->direction = xt->dir;
1949+
dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
1950+
desc->async_tx.tx_submit = xilinx_dma_tx_submit;
1951+
1952+
/* Get a free segment */
1953+
segment = xilinx_axidma_alloc_tx_segment(chan);
1954+
if (!segment)
1955+
goto error;
1956+
1957+
hw = &segment->hw;
1958+
1959+
/* Fill in the descriptor */
1960+
if (xt->dir != DMA_MEM_TO_DEV)
1961+
hw->buf_addr = xt->dst_start;
1962+
else
1963+
hw->buf_addr = xt->src_start;
1964+
1965+
hw->mcdma_control = chan->tdest & XILINX_DMA_BD_TDEST_MASK;
1966+
hw->vsize_stride = (xt->numf << XILINX_DMA_BD_VSIZE_SHIFT) &
1967+
XILINX_DMA_BD_VSIZE_MASK;
1968+
hw->vsize_stride |= (xt->sgl[0].icg + xt->sgl[0].size) &
1969+
XILINX_DMA_BD_STRIDE_MASK;
1970+
hw->control = xt->sgl[0].size & XILINX_DMA_BD_HSIZE_MASK;
1971+
1972+
/*
1973+
* Insert the segment into the descriptor segments
1974+
* list.
1975+
*/
1976+
list_add_tail(&segment->node, &desc->segments);
1977+
1978+
1979+
segment = list_first_entry(&desc->segments,
1980+
struct xilinx_axidma_tx_segment, node);
1981+
desc->async_tx.phys = segment->phys;
1982+
1983+
/* For the last DMA_MEM_TO_DEV transfer, set EOP */
1984+
if (xt->dir == DMA_MEM_TO_DEV) {
1985+
segment->hw.control |= XILINX_DMA_BD_SOP;
1986+
segment = list_last_entry(&desc->segments,
1987+
struct xilinx_axidma_tx_segment,
1988+
node);
1989+
segment->hw.control |= XILINX_DMA_BD_EOP;
1990+
}
1991+
1992+
return &desc->async_tx;
1993+
1994+
error:
1995+
xilinx_dma_free_tx_descriptor(chan, desc);
1996+
return NULL;
1997+
}
1998+
18641999
/**
18652000
* xilinx_dma_terminate_all - Halt the channel and free descriptors
18662001
* @chan: Driver specific DMA Channel pointer
@@ -2176,7 +2311,7 @@ static void xdma_disable_allclks(struct xilinx_dma_device *xdev)
21762311
* Return: '0' on success and failure value on error
21772312
*/
21782313
static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
2179-
struct device_node *node)
2314+
struct device_node *node, int chan_id)
21802315
{
21812316
struct xilinx_dma_chan *chan;
21822317
bool has_dre = false;
@@ -2220,7 +2355,8 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
22202355

22212356
if (of_device_is_compatible(node, "xlnx,axi-vdma-mm2s-channel")) {
22222357
chan->direction = DMA_MEM_TO_DEV;
2223-
chan->id = 0;
2358+
chan->id = chan_id;
2359+
chan->tdest = chan_id;
22242360

22252361
chan->ctrl_offset = XILINX_DMA_MM2S_CTRL_OFFSET;
22262362
if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
@@ -2233,7 +2369,8 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
22332369
} else if (of_device_is_compatible(node,
22342370
"xlnx,axi-vdma-s2mm-channel")) {
22352371
chan->direction = DMA_DEV_TO_MEM;
2236-
chan->id = 1;
2372+
chan->id = chan_id;
2373+
chan->tdest = chan_id - xdev->nr_channels;
22372374

22382375
chan->ctrl_offset = XILINX_DMA_S2MM_CTRL_OFFSET;
22392376
if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
@@ -2287,6 +2424,32 @@ static int xilinx_dma_chan_probe(struct xilinx_dma_device *xdev,
22872424
return 0;
22882425
}
22892426

2427+
/**
2428+
* xilinx_dma_child_probe - Per child node probe
2429+
* It get number of dma-channels per child node from
2430+
* device-tree and initializes all the channels.
2431+
*
2432+
* @xdev: Driver specific device structure
2433+
* @node: Device node
2434+
*
2435+
* Return: 0 always.
2436+
*/
2437+
static int xilinx_dma_child_probe(struct xilinx_dma_device *xdev,
2438+
struct device_node *node) {
2439+
int ret, i, nr_channels = 1;
2440+
2441+
ret = of_property_read_u32(node, "dma-channels", &nr_channels);
2442+
if ((ret < 0) && xdev->mcdma)
2443+
dev_warn(xdev->dev, "missing dma-channels property\n");
2444+
2445+
for (i = 0; i < nr_channels; i++)
2446+
xilinx_dma_chan_probe(xdev, node, xdev->chan_id++);
2447+
2448+
xdev->nr_channels += nr_channels;
2449+
2450+
return 0;
2451+
}
2452+
22902453
/**
22912454
* of_dma_xilinx_xlate - Translation function
22922455
* @dma_spec: Pointer to DMA specifier as found in the device tree
@@ -2300,7 +2463,7 @@ static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
23002463
struct xilinx_dma_device *xdev = ofdma->of_dma_data;
23012464
int chan_id = dma_spec->args[0];
23022465

2303-
if (chan_id >= XILINX_DMA_MAX_CHANS_PER_DEVICE || !xdev->chan[chan_id])
2466+
if (chan_id >= xdev->nr_channels || !xdev->chan[chan_id])
23042467
return NULL;
23052468

23062469
return dma_get_slave_channel(&xdev->chan[chan_id]->common);
@@ -2376,6 +2539,8 @@ static int xilinx_dma_probe(struct platform_device *pdev)
23762539

23772540
/* Retrieve the DMA engine properties from the device tree */
23782541
xdev->has_sg = of_property_read_bool(node, "xlnx,include-sg");
2542+
if (xdev->dma_config->dmatype == XDMA_TYPE_AXIDMA)
2543+
xdev->mcdma = of_property_read_bool(node, "xlnx,mcdma");
23792544

23802545
if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
23812546
err = of_property_read_u32(node, "xlnx,num-fstores",
@@ -2426,6 +2591,8 @@ static int xilinx_dma_probe(struct platform_device *pdev)
24262591
xdev->common.device_prep_slave_sg = xilinx_dma_prep_slave_sg;
24272592
xdev->common.device_prep_dma_cyclic =
24282593
xilinx_dma_prep_dma_cyclic;
2594+
xdev->common.device_prep_interleaved_dma =
2595+
xilinx_dma_prep_interleaved;
24292596
/* Residue calculation is supported by only AXI DMA */
24302597
xdev->common.residue_granularity =
24312598
DMA_RESIDUE_GRANULARITY_SEGMENT;
@@ -2441,13 +2608,13 @@ static int xilinx_dma_probe(struct platform_device *pdev)
24412608

24422609
/* Initialize the channels */
24432610
for_each_child_of_node(node, child) {
2444-
err = xilinx_dma_chan_probe(xdev, child);
2611+
err = xilinx_dma_child_probe(xdev, child);
24452612
if (err < 0)
24462613
goto disable_clks;
24472614
}
24482615

24492616
if (xdev->dma_config->dmatype == XDMA_TYPE_VDMA) {
2450-
for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
2617+
for (i = 0; i < xdev->nr_channels; i++)
24512618
if (xdev->chan[i])
24522619
xdev->chan[i]->num_frms = num_frames;
24532620
}
@@ -2470,7 +2637,7 @@ static int xilinx_dma_probe(struct platform_device *pdev)
24702637
disable_clks:
24712638
xdma_disable_allclks(xdev);
24722639
error:
2473-
for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
2640+
for (i = 0; i < xdev->nr_channels; i++)
24742641
if (xdev->chan[i])
24752642
xilinx_dma_chan_remove(xdev->chan[i]);
24762643

@@ -2492,7 +2659,7 @@ static int xilinx_dma_remove(struct platform_device *pdev)
24922659

24932660
dma_async_device_unregister(&xdev->common);
24942661

2495-
for (i = 0; i < XILINX_DMA_MAX_CHANS_PER_DEVICE; i++)
2662+
for (i = 0; i < xdev->nr_channels; i++)
24962663
if (xdev->chan[i])
24972664
xilinx_dma_chan_remove(xdev->chan[i]);
24982665

0 commit comments

Comments
 (0)