async_desc        613 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc;
async_desc        630 drivers/dma/qcom/bam_dma.c 	async_desc = kzalloc(struct_size(async_desc, desc, num_alloc),
async_desc        633 drivers/dma/qcom/bam_dma.c 	if (!async_desc)
async_desc        637 drivers/dma/qcom/bam_dma.c 		async_desc->flags |= DESC_FLAG_NWD;
async_desc        640 drivers/dma/qcom/bam_dma.c 		async_desc->flags |= DESC_FLAG_EOT;
async_desc        642 drivers/dma/qcom/bam_dma.c 	async_desc->num_desc = num_alloc;
async_desc        643 drivers/dma/qcom/bam_dma.c 	async_desc->curr_desc = async_desc->desc;
async_desc        644 drivers/dma/qcom/bam_dma.c 	async_desc->dir = direction;
async_desc        647 drivers/dma/qcom/bam_dma.c 	desc = async_desc->desc;
async_desc        668 drivers/dma/qcom/bam_dma.c 			async_desc->length += le16_to_cpu(desc->size);
async_desc        673 drivers/dma/qcom/bam_dma.c 	return vchan_tx_prep(&bchan->vc, &async_desc->vd, flags);
async_desc        676 drivers/dma/qcom/bam_dma.c 	kfree(async_desc);
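
The hits at 613-676 are the transaction setup in the slave_sg prep path: a single zeroed allocation sized with struct_size() carries the bam_async_desc header plus its trailing flexible array of hardware descriptors, and the failure path at 676 releases the whole thing with one kfree(). A minimal userspace sketch of that allocation pattern follows; the field names and descriptor layout are illustrative assumptions, not the driver's exact definitions, and the real kernel struct_size() additionally guards the multiplication against overflow.

        #include <stdint.h>
        #include <stdlib.h>

        struct hw_desc { uint16_t size; uint16_t flags; uint32_t addr; };

        struct async_desc {
                unsigned int num_desc;
                unsigned int length;
                uint16_t flags;
                struct hw_desc desc[];          /* flexible array member */
        };

        /* simplified struct_size(): header plus n trailing elements;
         * the kernel macro also saturates on overflow */
        #define struct_size(p, member, n) \
                (sizeof(*(p)) + (size_t)(n) * sizeof((p)->member[0]))

        static struct async_desc *alloc_async_desc(unsigned int num_alloc)
        {
                struct async_desc *ad =
                        calloc(1, struct_size(ad, desc, num_alloc));

                if (!ad)
                        return NULL;            /* prep callback bails out */

                ad->num_desc = num_alloc;
                return ad;                      /* one free() releases all */
        }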
async_desc        691 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc, *tmp;
async_desc        711 drivers/dma/qcom/bam_dma.c 		async_desc = list_first_entry(&bchan->desc_list,
async_desc        713 drivers/dma/qcom/bam_dma.c 		bam_chan_init_hw(bchan, async_desc->dir);
async_desc        716 drivers/dma/qcom/bam_dma.c 	list_for_each_entry_safe(async_desc, tmp,
async_desc        718 drivers/dma/qcom/bam_dma.c 		list_add(&async_desc->vd.node, &bchan->vc.desc_issued);
async_desc        719 drivers/dma/qcom/bam_dma.c 		list_del(&async_desc->desc_node);
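
The 691-719 hits are the terminate path: the channel hardware is re-initialized using the direction of the first in-flight descriptor, then every bam_async_desc is moved off bchan->desc_list and back onto the virt-dma desc_issued list so the core can reap it. Note that the structure sits on two lists at once through two embedded list_heads, and the _safe iterator is what makes unlinking during the walk legal. Below is a minimal userspace rendering of that double-membership move, with hand-rolled primitives standing in for <linux/list.h>; names ending in _sk are hypothetical.

        #include <stddef.h>

        struct list_head { struct list_head *next, *prev; };

        #define list_entry(ptr, type, member) \
                ((type *)((char *)(ptr) - offsetof(type, member)))

        static void list_del(struct list_head *e)
        {
                e->prev->next = e->next;
                e->next->prev = e->prev;
        }

        static void list_add(struct list_head *e, struct list_head *head)
        {
                e->next = head->next;
                e->prev = head;
                e->next->prev = e;
                head->next = e;
        }

        /* one object, two list memberships, as in bam_async_desc */
        struct async_desc_sk {
                struct list_head vd_node;       /* vc.desc_issued side */
                struct list_head desc_node;     /* bchan->desc_list side */
        };

        /* move everything from desc_list back to desc_issued; caching
         * the successor before list_del() unlinks the current node is
         * exactly what list_for_each_entry_safe() does under the hood */
        static void requeue_all(struct list_head *desc_list,
                                struct list_head *desc_issued)
        {
                struct list_head *pos = desc_list->next, *n;

                while (pos != desc_list) {
                        struct async_desc_sk *ad =
                                list_entry(pos, struct async_desc_sk,
                                           desc_node);

                        n = pos->next;
                        list_add(&ad->vd_node, desc_issued);
                        list_del(&ad->desc_node);
                        pos = n;
                }
        }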
async_desc        793 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc, *tmp;
async_desc        824 drivers/dma/qcom/bam_dma.c 		list_for_each_entry_safe(async_desc, tmp,
async_desc        827 drivers/dma/qcom/bam_dma.c 			if (avail < async_desc->xfer_len)
async_desc        831 drivers/dma/qcom/bam_dma.c 			bchan->head += async_desc->xfer_len;
async_desc        834 drivers/dma/qcom/bam_dma.c 			async_desc->num_desc -= async_desc->xfer_len;
async_desc        835 drivers/dma/qcom/bam_dma.c 			async_desc->curr_desc += async_desc->xfer_len;
async_desc        836 drivers/dma/qcom/bam_dma.c 			avail -= async_desc->xfer_len;
async_desc        843 drivers/dma/qcom/bam_dma.c 			if (!async_desc->num_desc) {
async_desc        844 drivers/dma/qcom/bam_dma.c 				vchan_cookie_complete(&async_desc->vd);
async_desc        846 drivers/dma/qcom/bam_dma.c 				list_add(&async_desc->vd.node,
async_desc        849 drivers/dma/qcom/bam_dma.c 			list_del(&async_desc->desc_node);
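
Lines 793-849 are the interrupt-time completion accounting: `avail` counts hardware descriptors the BAM has consumed since the last pass, the in-flight list is retired in submission order, a transaction whose descriptors are not all consumed ends the walk, and one that still has descriptors left to issue goes back on desc_issued instead of being completed. A compact model of that arithmetic, with hypothetical names (txn, retire, fifo_size) flattening the list into an array:

        /* hypothetical flattening of the in-flight queue */
        struct txn {
                unsigned int num_desc;  /* descriptors still owed */
                unsigned int xfer_len;  /* descriptors issued this round */
        };

        /*
         * Retire transactions in order; returns how much of `avail`
         * was left unmatched. A transaction only partially consumed
         * by hardware stays at the head and stops the walk.
         */
        static unsigned int retire(struct txn *q, unsigned int nq,
                                   unsigned int avail, unsigned int *head,
                                   unsigned int fifo_size)
        {
                unsigned int i;

                for (i = 0; i < nq; i++) {
                        if (avail < q[i].xfer_len)
                                break;          /* not fully consumed */

                        *head = (*head + q[i].xfer_len) % fifo_size;
                        avail -= q[i].xfer_len;
                        q[i].num_desc -= q[i].xfer_len;
                        /*
                         * num_desc == 0: complete the cookie (844);
                         * num_desc != 0: requeue so the next start
                         * issues the remainder (846-849)
                         */
                }
                return avail;
        }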
async_desc        911 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc;
async_desc        930 drivers/dma/qcom/bam_dma.c 		list_for_each_entry(async_desc, &bchan->desc_list, desc_node) {
async_desc        931 drivers/dma/qcom/bam_dma.c 			if (async_desc->vd.tx.cookie != cookie)
async_desc        934 drivers/dma/qcom/bam_dma.c 			for (i = 0; i < async_desc->num_desc; i++)
async_desc        936 drivers/dma/qcom/bam_dma.c 						async_desc->curr_desc[i].size);
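
The 911-936 hits are the residue computation in the tx_status path: walk the in-flight list until the cookie matches, then sum the sizes of the hardware descriptors that have not completed yet. A self-contained sketch, assuming a uint16_t size field and modeling le16_to_cpu() as a plain read (a no-op on little-endian hosts):

        #include <stdint.h>

        struct hwd { uint16_t size; };  /* assumed descriptor layout */

        static unsigned int residue_of(const struct hwd *curr_desc,
                                       unsigned int num_desc)
        {
                unsigned int i, residue = 0;

                for (i = 0; i < num_desc; i++)
                        residue += curr_desc[i].size;   /* le16_to_cpu() */

                return residue;
        }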
async_desc        982 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc = NULL;
async_desc       1002 drivers/dma/qcom/bam_dma.c 		async_desc = container_of(vd, struct bam_async_desc, vd);
async_desc       1006 drivers/dma/qcom/bam_dma.c 			bam_chan_init_hw(bchan, async_desc->dir);
async_desc       1010 drivers/dma/qcom/bam_dma.c 			bam_apply_new_config(bchan, async_desc->dir);
async_desc       1012 drivers/dma/qcom/bam_dma.c 		desc = async_desc->curr_desc;
async_desc       1016 drivers/dma/qcom/bam_dma.c 		if (async_desc->num_desc > avail)
async_desc       1017 drivers/dma/qcom/bam_dma.c 			async_desc->xfer_len = avail;
async_desc       1019 drivers/dma/qcom/bam_dma.c 			async_desc->xfer_len = async_desc->num_desc;
async_desc       1022 drivers/dma/qcom/bam_dma.c 		if (async_desc->num_desc == async_desc->xfer_len)
async_desc       1023 drivers/dma/qcom/bam_dma.c 			desc[async_desc->xfer_len - 1].flags |=
async_desc       1024 drivers/dma/qcom/bam_dma.c 						cpu_to_le16(async_desc->flags);
async_desc       1028 drivers/dma/qcom/bam_dma.c 		dmaengine_desc_get_callback(&async_desc->vd.tx, &cb);
async_desc       1038 drivers/dma/qcom/bam_dma.c 		if (((avail <= async_desc->xfer_len) || !vd ||
async_desc       1040 drivers/dma/qcom/bam_dma.c 		    !(async_desc->flags & DESC_FLAG_EOT))
async_desc       1041 drivers/dma/qcom/bam_dma.c 			desc[async_desc->xfer_len - 1].flags |=
async_desc       1044 drivers/dma/qcom/bam_dma.c 		if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) {
async_desc       1050 drivers/dma/qcom/bam_dma.c 			       (async_desc->xfer_len - partial) *
async_desc       1054 drivers/dma/qcom/bam_dma.c 			       async_desc->xfer_len *
async_desc       1058 drivers/dma/qcom/bam_dma.c 		bchan->tail += async_desc->xfer_len;
async_desc       1060 drivers/dma/qcom/bam_dma.c 		list_add_tail(&async_desc->desc_node, &bchan->desc_list);
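
The 982-1060 block is the start-DMA path: it clamps xfer_len to the free FIFO space (1016-1019), applies the NWD/EOT flags only when the entire transaction fits (1022-1024), requests an interrupt on the last descriptor when no further work would follow or a callback must run (1038-1041), and copies the descriptors into the fixed-size FIFO with an explicit wraparound split (1044-1058). The wrap logic is the subtle part; below is a self-contained sketch of it, with FIFO_DEPTH and fifo_push as stand-ins for the driver's MAX_DESCRIPTORS and its open-coded memcpy pair.

        #include <stdint.h>
        #include <string.h>

        struct bdesc { uint16_t size, flags; uint32_t addr; };

        #define FIFO_DEPTH 256          /* illustrative, not the driver's */

        static void fifo_push(struct bdesc *fifo, unsigned int *tail,
                              const struct bdesc *src, unsigned int xfer_len)
        {
                if (*tail + xfer_len > FIFO_DEPTH) {
                        /* split: fill to the end, wrap the rest to 0 */
                        unsigned int partial = FIFO_DEPTH - *tail;

                        memcpy(&fifo[*tail], src, partial * sizeof(*src));
                        memcpy(fifo, src + partial,
                               (xfer_len - partial) * sizeof(*src));
                } else {
                        memcpy(&fifo[*tail], src, xfer_len * sizeof(*src));
                }
                *tail = (*tail + xfer_len) % FIFO_DEPTH;
        }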
async_desc       1124 drivers/dma/qcom/bam_dma.c 	struct bam_async_desc *async_desc = container_of(vd,
async_desc       1127 drivers/dma/qcom/bam_dma.c 	kfree(async_desc);
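
The final hits at 1124-1127 are the free callback: the virt-dma core hands back the embedded virt_dma_desc, and container_of() recovers the enclosing bam_async_desc so the entire struct_size() allocation from 630 is released with a single kfree(). A minimal userspace rendering of that pattern; the struct body is a placeholder, only the embedding and the pointer arithmetic matter.

        #include <stddef.h>
        #include <stdlib.h>

        struct virt_dma_desc { int cookie; };  /* placeholder body */

        struct bam_async_desc_sk {
                unsigned int num_desc;
                struct virt_dma_desc vd;        /* embedded, not pointed to */
        };

        #define container_of(ptr, type, member) \
                ((type *)((char *)(ptr) - offsetof(type, member)))

        /* the core passes &ad->vd; recover ad and free the whole object */
        static void free_desc(struct virt_dma_desc *vd)
        {
                struct bam_async_desc_sk *ad =
                        container_of(vd, struct bam_async_desc_sk, vd);

                free(ad);                       /* kfree() in the driver */
        }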