vd 229 arch/arm/mach-omap2/vc.c struct voltagedomain *vd;
vd 239 arch/arm/mach-omap2/vc.c struct voltagedomain *vd = vc.vd;
vd 268 arch/arm/mach-omap2/vc.c vd->write(voltctrl, OMAP3_PRM_VOLTCTRL_OFFSET);
vd 272 arch/arm/mach-omap2/vc.c vd->write(c->voltsetup1,
vd 277 arch/arm/mach-omap2/vc.c vd->write(c->voltsetup2,
vd 292 arch/arm/mach-omap2/vc.c if (vc.vd)
vd 295 arch/arm/mach-omap2/vc.c vc.vd = voltdm;
vd 343 arch/arm/vfp/vfp.h u32 vfp_double_normaliseround(int dd, struct vfp_double *vd, u32 fpscr, u32 exceptions, const char *func);
vd 54 arch/arm/vfp/vfpdouble.c static void vfp_double_normalise_denormal(struct vfp_double *vd)
vd 56 arch/arm/vfp/vfpdouble.c int bits = 31 - fls(vd->significand >> 32);
vd 58 arch/arm/vfp/vfpdouble.c bits = 63 - fls(vd->significand);
vd 60 arch/arm/vfp/vfpdouble.c vfp_double_dump("normalise_denormal: in", vd);
vd 63 arch/arm/vfp/vfpdouble.c vd->exponent -= bits - 1;
vd 64 arch/arm/vfp/vfpdouble.c vd->significand <<= bits;
vd 67 arch/arm/vfp/vfpdouble.c vfp_double_dump("normalise_denormal: out", vd);
vd 70 arch/arm/vfp/vfpdouble.c u32 vfp_double_normaliseround(int dd, struct vfp_double *vd, u32 fpscr, u32 exceptions, const char *func)
vd 76 arch/arm/vfp/vfpdouble.c vfp_double_dump("pack: in", vd);
vd 81 arch/arm/vfp/vfpdouble.c if (vd->exponent == 2047 && (vd->significand == 0 || exceptions))
vd 87 arch/arm/vfp/vfpdouble.c if (vd->significand == 0) {
vd 88 arch/arm/vfp/vfpdouble.c vd->exponent = 0;
vd 92 arch/arm/vfp/vfpdouble.c exponent = vd->exponent;
vd 93 arch/arm/vfp/vfpdouble.c significand = vd->significand;
vd 104 arch/arm/vfp/vfpdouble.c vd->exponent = exponent;
vd 105 arch/arm/vfp/vfpdouble.c vd->significand = significand;
vd 106 arch/arm/vfp/vfpdouble.c vfp_double_dump("pack: normalised", vd);
vd 117 arch/arm/vfp/vfpdouble.c vd->exponent = exponent;
vd 118 arch/arm/vfp/vfpdouble.c vd->significand = significand;
vd 119 arch/arm/vfp/vfpdouble.c vfp_double_dump("pack: tiny number", vd);
vd 137 arch/arm/vfp/vfpdouble.c } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vd->sign != 0))
vd 150 arch/arm/vfp/vfpdouble.c vd->exponent = exponent;
vd 151 arch/arm/vfp/vfpdouble.c vd->significand = significand;
vd 152 arch/arm/vfp/vfpdouble.c vfp_double_dump("pack: overflow", vd);
vd 174 arch/arm/vfp/vfpdouble.c vd->exponent = 2045;
vd 175 arch/arm/vfp/vfpdouble.c vd->significand = 0x7fffffffffffffffULL;
vd 177 arch/arm/vfp/vfpdouble.c vd->exponent = 2047; /* infinity */
vd 178 arch/arm/vfp/vfpdouble.c vd->significand = 0;
vd 187 arch/arm/vfp/vfpdouble.c vd->exponent = exponent;
vd 188 arch/arm/vfp/vfpdouble.c vd->significand = significand >> 1;
vd 192 arch/arm/vfp/vfpdouble.c vfp_double_dump("pack: final", vd);
vd 194 arch/arm/vfp/vfpdouble.c s64 d = vfp_double_pack(vd);
vd 752 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int vd : 5,
vd 264 arch/powerpc/kernel/vecemu.c unsigned int va, vb, vc, vd;
vd 271 arch/powerpc/kernel/vecemu.c vd = (instr >> 21) & 0x1f;
vd 281 arch/powerpc/kernel/vecemu.c vaddfp(&vrs[vd], &vrs[va], &vrs[vb]);
vd 284 arch/powerpc/kernel/vecemu.c vsubfp(&vrs[vd], &vrs[va], &vrs[vb]);
vd 287 arch/powerpc/kernel/vecemu.c vrefp(&vrs[vd], &vrs[vb]);
vd 290 arch/powerpc/kernel/vecemu.c vrsqrtefp(&vrs[vd], &vrs[vb]);
vd 294 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = eexp2(vrs[vb].u[i]);
vd 298 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = elog2(vrs[vb].u[i]);
vd 302 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = rfin(vrs[vb].u[i]);
vd 306 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = rfiz(vrs[vb].u[i]);
vd 312 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = x;
vd 319 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = x;
vd 324 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = ctuxs(vrs[vb].u[i], va,
vd 329 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = ctsxs(vrs[vb].u[i], va,
vd 337 arch/powerpc/kernel/vecemu.c vmaddfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);
vd 340 arch/powerpc/kernel/vecemu.c vnmsubfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);
vd 73 arch/powerpc/platforms/pseries/mobility.c const char *name, u32 vd, char *value)
vd 84 arch/powerpc/platforms/pseries/mobility.c if (vd & 0x80000000) {
vd 85 arch/powerpc/platforms/pseries/mobility.c vd = ~vd + 1;
vd 91 arch/powerpc/platforms/pseries/mobility.c char *new_data = kzalloc(new_prop->length + vd, GFP_KERNEL);
vd 96 arch/powerpc/platforms/pseries/mobility.c memcpy(new_data + new_prop->length, value, vd);
vd 100 arch/powerpc/platforms/pseries/mobility.c new_prop->length += vd;
vd 112 arch/powerpc/platforms/pseries/mobility.c new_prop->length = vd;
vd 120 arch/powerpc/platforms/pseries/mobility.c memcpy(new_prop->value, value, vd);
vd 142 arch/powerpc/platforms/pseries/mobility.c u32 vd;
vd 177 arch/powerpc/platforms/pseries/mobility.c vd = be32_to_cpu(*(__be32 *)prop_data);
vd 178 arch/powerpc/platforms/pseries/mobility.c prop_data += vd + sizeof(vd);
vd 187 arch/powerpc/platforms/pseries/mobility.c vd = be32_to_cpu(*(__be32 *)prop_data);
vd 188 arch/powerpc/platforms/pseries/mobility.c prop_data += sizeof(vd);
vd 190 arch/powerpc/platforms/pseries/mobility.c switch (vd) {
vd 203 arch/powerpc/platforms/pseries/mobility.c vd, prop_data);
vd 209 arch/powerpc/platforms/pseries/mobility.c prop_data += vd;
vd 122 arch/s390/kernel/vdso.c static void __init vdso_init_data(struct vdso_data *vd)
vd 124 arch/s390/kernel/vdso.c vd->ectg_available = test_facility(31);
vd 149 arch/s390/kernel/vdso.c struct vdso_per_cpu_data *vd;
vd 160 arch/s390/kernel/vdso.c vd = (struct vdso_per_cpu_data *) page_frame;
vd 161 arch/s390/kernel/vdso.c vd->cpu_nr = lowcore->cpu_nr;
vd 162 arch/s390/kernel/vdso.c vd->node_id = cpu_to_node(vd->cpu_nr);
vd 233 drivers/acpi/acpi_video.c struct acpi_video_device *vd = bl_get_data(bd);
vd 235 drivers/acpi/acpi_video.c if (acpi_video_device_lcd_get_level_current(vd, &cur_level, false))
vd 237 drivers/acpi/acpi_video.c for (i = ACPI_VIDEO_FIRST_LEVEL; i < vd->brightness->count; i++) {
vd 238 drivers/acpi/acpi_video.c if (vd->brightness->levels[i] == cur_level)
vd 247 drivers/acpi/acpi_video.c struct acpi_video_device *vd = bl_get_data(bd);
vd 249 drivers/acpi/acpi_video.c cancel_delayed_work(&vd->switch_brightness_work);
vd 250 drivers/acpi/acpi_video.c return acpi_video_device_lcd_set_level(vd,
vd 251 drivers/acpi/acpi_video.c vd->brightness->levels[request_level]);
vd 76 drivers/clk/versatile/icst.c unsigned int vd;
vd 81 drivers/clk/versatile/icst.c vd = (f + fref_div / 2) / fref_div;
vd 82 drivers/clk/versatile/icst.c if (vd < p->vd_min || vd > p->vd_max)
vd 85 drivers/clk/versatile/icst.c f_pll = fref_div * vd;
vd 91 drivers/clk/versatile/icst.c vco.v = vd - 8;
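An editor's aside on the arch/powerpc/platforms/pseries/mobility.c entries above: there, vd is the 32-bit big-endian length word that precedes each property value in the ibm,update-properties RTAS buffer, and a set high bit marks a negated length for a partial chunk (hence the "vd = ~vd + 1" line). A minimal userspace sketch of that decoding follows; decode_vd() is a hypothetical helper written for illustration, not a kernel function.

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

static uint32_t decode_vd(uint32_t vd, bool *more_chunks)
{
	*more_chunks = false;
	if (vd & 0x80000000) {	/* negative length: partial value */
		vd = ~vd + 1;	/* two's complement, as in update_dt_property() */
		*more_chunks = true;
	}
	return vd;		/* number of value bytes that follow */
}

int main(void)
{
	bool more;
	uint32_t len = decode_vd(0xfffffff0, &more);	/* -16 encoded */

	printf("chunk of %u bytes, more=%d\n", len, more);	/* prints 16, 1 */
	len = decode_vd(8, &more);
	printf("final chunk of %u bytes, more=%d\n", len, more);	/* prints 8, 0 */
	return 0;
}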
vd 199 drivers/dma/amba-pl08x.c struct virt_dma_desc vd;
vd 284 drivers/dma/amba-pl08x.c const struct vendor_data *vd;
vd 322 drivers/dma/amba-pl08x.c return container_of(tx, struct pl08x_txd, vd.tx);
vd 394 drivers/dma/amba-pl08x.c if (pl08x->vd->pl080s)
vd 514 drivers/dma/amba-pl08x.c if (pl08x->vd->pl080s)
vd 531 drivers/dma/amba-pl08x.c struct virt_dma_desc *vd = vchan_next_desc(&plchan->vc);
vd 532 drivers/dma/amba-pl08x.c struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
vd 535 drivers/dma/amba-pl08x.c list_del(&txd->vd.node);
vd 825 drivers/dma/amba-pl08x.c for (i = 0; i < pl08x->vd->channels; i++) {
vd 839 drivers/dma/amba-pl08x.c if (i == pl08x->vd->channels) {
vd 973 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 1015 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 1136 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 1186 drivers/dma/amba-pl08x.c if (pl08x->vd->pl080s)
vd 1189 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 1220 drivers/dma/amba-pl08x.c if (pl08x->vd->pl080s) {
vd 1334 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020)
vd 1403 drivers/dma/amba-pl08x.c pl08x->vd->max_transfer_size;
vd 1481 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020)
vd 1508 drivers/dma/amba-pl08x.c static void pl08x_desc_free(struct virt_dma_desc *vd)
vd 1510 drivers/dma/amba-pl08x.c struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
vd 1511 drivers/dma/amba-pl08x.c struct pl08x_dma_chan *plchan = to_pl08x_chan(vd->tx.chan);
vd 1513 drivers/dma/amba-pl08x.c dma_descriptor_unmap(&vd->tx);
vd 1555 drivers/dma/amba-pl08x.c struct virt_dma_desc *vd;
vd 1577 drivers/dma/amba-pl08x.c vd = vchan_find_desc(&plchan->vc, cookie);
vd 1578 drivers/dma/amba-pl08x.c if (vd) {
vd 1580 drivers/dma/amba-pl08x.c struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
vd 1836 drivers/dma/amba-pl08x.c if (pl08x->vd->dualmaster)
vd 1878 drivers/dma/amba-pl08x.c if (pl08x->vd->dualmaster)
vd 1916 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 1933 drivers/dma/amba-pl08x.c return vchan_tx_prep(&plchan->vc, &txd->vd, flags);
vd 2088 drivers/dma/amba-pl08x.c return vchan_tx_prep(&plchan->vc, &txd->vd, flags);
vd 2131 drivers/dma/amba-pl08x.c return vchan_tx_prep(&plchan->vc, &txd->vd, flags);
vd 2148 drivers/dma/amba-pl08x.c if (config->device_fc && pl08x->vd->pl080s) {
vd 2183 drivers/dma/amba-pl08x.c vchan_terminate_vdesc(&plchan->at->vd);
vd 2282 drivers/dma/amba-pl08x.c if (pl08x->vd->nomadik)
vd 2285 drivers/dma/amba-pl08x.c if (pl08x->vd->ftdmac020) {
vd 2311 drivers/dma/amba-pl08x.c for (i = 0; i < pl08x->vd->channels; i++) {
vd 2328 drivers/dma/amba-pl08x.c vchan_cyclic_callback(&tx->vd);
vd 2337 drivers/dma/amba-pl08x.c vchan_cookie_complete(&tx->vd);
vd 2468 drivers/dma/amba-pl08x.c for (i = 0; i < pl08x->vd->channels; i++) {
vd 2668 drivers/dma/amba-pl08x.c if (pl08x->vd->signals) {
vd 2670 drivers/dma/amba-pl08x.c pl08x->vd->signals,
vd 2677 drivers/dma/amba-pl08x.c for (i = 0; i < pl08x->vd->signals; i++) {
vd 2684 drivers/dma/amba-pl08x.c pd->num_slave_channels = pl08x->vd->signals;
vd 2704 drivers/dma/amba-pl08x.c struct vendor_data *vd = id->data;
vd 2728 drivers/dma/amba-pl08x.c pl08x->vd = vd;
vd 2736 drivers/dma/amba-pl08x.c if (vd->ftdmac020) {
vd 2754 drivers/dma/amba-pl08x.c vd->channels = (val >> 12) & 0x0f;
vd 2755 drivers/dma/amba-pl08x.c vd->dualmaster = !!(val & BIT(9));
vd 2775 drivers/dma/amba-pl08x.c if (vd->ftdmac020)
vd 2783 drivers/dma/amba-pl08x.c if (vd->signals) {
vd 2830 drivers/dma/amba-pl08x.c if (pl08x->vd->dualmaster) {
vd 2835 drivers/dma/amba-pl08x.c if (vd->pl080s)
vd 2853 drivers/dma/amba-pl08x.c if (vd->ftdmac020)
vd 2869 drivers/dma/amba-pl08x.c pl08x->phy_chans = kzalloc((vd->channels * sizeof(*pl08x->phy_chans)),
vd 2876 drivers/dma/amba-pl08x.c for (i = 0; i < vd->channels; i++) {
vd 2881 drivers/dma/amba-pl08x.c if (vd->ftdmac020) {
vd 2891 drivers/dma/amba-pl08x.c ch->reg_config = ch->base + vd->config_offset;
vd 2897 drivers/dma/amba-pl08x.c if (vd->pl080s)
vd 2907 drivers/dma/amba-pl08x.c if (vd->nomadik) {
vd 2923 drivers/dma/amba-pl08x.c pl08x->vd->channels, false);
vd 2964 drivers/dma/amba-pl08x.c amba_part(adev), pl08x->vd->pl080s ? "s" : "", amba_rev(adev),
vd 89 drivers/dma/bcm2835-dma.c struct virt_dma_desc vd;
vd 203 drivers/dma/bcm2835-dma.c return container_of(t, struct bcm2835_desc, vd.tx);
vd 217 drivers/dma/bcm2835-dma.c static void bcm2835_dma_desc_free(struct virt_dma_desc *vd)
vd 220 drivers/dma/bcm2835-dma.c container_of(vd, struct bcm2835_desc, vd));
vd 442 drivers/dma/bcm2835-dma.c struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
vd 445 drivers/dma/bcm2835-dma.c if (!vd) {
vd 450 drivers/dma/bcm2835-dma.c list_del(&vd->node);
vd 452 drivers/dma/bcm2835-dma.c c->desc = d = to_bcm2835_dma_desc(&vd->tx);
vd 490 drivers/dma/bcm2835-dma.c vchan_cyclic_callback(&d->vd);
vd 492 drivers/dma/bcm2835-dma.c vchan_cookie_complete(&c->desc->vd);
vd 568 drivers/dma/bcm2835-dma.c struct virt_dma_desc *vd;
vd 577 drivers/dma/bcm2835-dma.c vd = vchan_find_desc(&c->vc, cookie);
vd 578 drivers/dma/bcm2835-dma.c if (vd) {
vd 580 drivers/dma/bcm2835-dma.c bcm2835_dma_desc_size(to_bcm2835_dma_desc(&vd->tx));
vd 581 drivers/dma/bcm2835-dma.c } else if (c->desc && c->desc->vd.tx.cookie == cookie) {
vd 639 drivers/dma/bcm2835-dma.c return vchan_tx_prep(&c->vc, &d->vd, flags);
vd 691 drivers/dma/bcm2835-dma.c return vchan_tx_prep(&c->vc, &d->vd, flags);
vd 777 drivers/dma/bcm2835-dma.c return vchan_tx_prep(&c->vc, &d->vd, flags);
vd 800 drivers/dma/bcm2835-dma.c if (c->desc->vd.tx.flags & DMA_PREP_INTERRUPT)
vd 801 drivers/dma/bcm2835-dma.c vchan_terminate_vdesc(&c->desc->vd);
vd 803 drivers/dma/bcm2835-dma.c vchan_vdesc_fini(&c->desc->vd);
vd 213 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c desc->vd.tx.phys = phys;
vd 228 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c dma_pool_free(dw->desc_pool, child, child->vd.tx.phys);
vd 232 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c dma_pool_free(dw->desc_pool, desc, desc->vd.tx.phys);
vd 298 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c write_chan_llp(chan, first->vd.tx.phys | lms);
vd 313 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c struct virt_dma_desc *vd;
vd 315 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vd = vchan_next_desc(&chan->vc);
vd 316 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c if (!vd)
vd 319 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c desc = vd_to_axi_desc(vd);
vd 321 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vd->tx.cookie);
vd 495 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c write_desc_llp(prev, desc->vd.tx.phys | lms);
vd 512 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c return vchan_tx_prep(&chan->vc, &first->vd, flags);
vd 545 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c struct virt_dma_desc *vd;
vd 553 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vd = vchan_next_desc(&chan->vc);
vd 555 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c list_del(&vd->node);
vd 560 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c axi_chan_name(chan), vd->tx.cookie, status);
vd 561 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c axi_chan_list_dump_lli(chan, vd_to_axi_desc(vd));
vd 563 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vchan_cookie_complete(vd);
vd 573 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c struct virt_dma_desc *vd;
vd 584 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vd = vchan_next_desc(&chan->vc);
vd 586 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c list_del(&vd->node);
vd 587 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c vchan_cookie_complete(vd);
vd 86 drivers/dma/dw-axi-dmac/dw-axi-dmac.h struct virt_dma_desc vd;
vd 101 drivers/dma/dw-axi-dmac/dw-axi-dmac.h static inline struct axi_dma_desc *vd_to_axi_desc(struct virt_dma_desc *vd)
vd 103 drivers/dma/dw-axi-dmac/dw-axi-dmac.h return container_of(vd, struct axi_dma_desc, vd);
vd 37 drivers/dma/dw-edma/dw-edma-core.c struct dw_edma_desc *vd2dw_edma_desc(struct virt_dma_desc *vd)
vd 39 drivers/dma/dw-edma/dw-edma-core.c return container_of(vd, struct dw_edma_desc, vd);
vd 172 drivers/dma/dw-edma/dw-edma-core.c struct virt_dma_desc *vd;
vd 174 drivers/dma/dw-edma/dw-edma-core.c vd = vchan_next_desc(&chan->vc);
vd 175 drivers/dma/dw-edma/dw-edma-core.c if (!vd)
vd 178 drivers/dma/dw-edma/dw-edma-core.c desc = vd2dw_edma_desc(vd);
vd 291 drivers/dma/dw-edma/dw-edma-core.c struct virt_dma_desc *vd;
vd 307 drivers/dma/dw-edma/dw-edma-core.c vd = vchan_find_desc(&chan->vc, cookie);
vd 308 drivers/dma/dw-edma/dw-edma-core.c if (vd) {
vd 309 drivers/dma/dw-edma/dw-edma-core.c desc = vd2dw_edma_desc(vd);
vd 425 drivers/dma/dw-edma/dw-edma-core.c return vchan_tx_prep(&chan->vc, &desc->vd, xfer->flags);
vd 474 drivers/dma/dw-edma/dw-edma-core.c struct virt_dma_desc *vd;
vd 480 drivers/dma/dw-edma/dw-edma-core.c vd = vchan_next_desc(&chan->vc);
vd 481 drivers/dma/dw-edma/dw-edma-core.c if (vd) {
vd 484 drivers/dma/dw-edma/dw-edma-core.c desc = vd2dw_edma_desc(vd);
vd 489 drivers/dma/dw-edma/dw-edma-core.c list_del(&vd->node);
vd 490 drivers/dma/dw-edma/dw-edma-core.c vchan_cookie_complete(vd);
vd 496 drivers/dma/dw-edma/dw-edma-core.c list_del(&vd->node);
vd 497 drivers/dma/dw-edma/dw-edma-core.c vchan_cookie_complete(vd);
vd 516 drivers/dma/dw-edma/dw-edma-core.c struct virt_dma_desc *vd;
vd 522 drivers/dma/dw-edma/dw-edma-core.c vd = vchan_next_desc(&chan->vc);
vd 523 drivers/dma/dw-edma/dw-edma-core.c if (vd) {
vd 524 drivers/dma/dw-edma/dw-edma-core.c list_del(&vd->node);
vd 525 drivers/dma/dw-edma/dw-edma-core.c vchan_cookie_complete(vd);
vd 69 drivers/dma/dw-edma/dw-edma-core.h struct virt_dma_desc vd;
vd 218 drivers/dma/fsl-edma-common.h static inline struct fsl_edma_desc *to_fsl_edma_desc(struct virt_dma_desc *vd)
vd 220 drivers/dma/fsl-edma-common.h return container_of(vd, struct fsl_edma_desc, vdesc);
vd 287 drivers/dma/fsl-qdma.c static struct fsl_qdma_comp *to_fsl_qdma_comp(struct virt_dma_desc *vd)
vd 289 drivers/dma/fsl-qdma.c return container_of(vd, struct fsl_qdma_comp, vdesc);
vd 106 drivers/dma/img-mdc-dma.c struct virt_dma_desc vd;
vd 178 drivers/dma/img-mdc-dma.c return container_of(vdesc, struct mdc_tx_desc, vd);
vd 277 drivers/dma/img-mdc-dma.c static void mdc_desc_free(struct virt_dma_desc *vd)
vd 279 drivers/dma/img-mdc-dma.c struct mdc_tx_desc *mdesc = to_mdc_desc(&vd->tx);
vd 332 drivers/dma/img-mdc-dma.c return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags);
vd 335 drivers/dma/img-mdc-dma.c mdc_desc_free(&mdesc->vd);
vd 439 drivers/dma/img-mdc-dma.c return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags);
vd 442 drivers/dma/img-mdc-dma.c mdc_desc_free(&mdesc->vd);
vd 516 drivers/dma/img-mdc-dma.c return vchan_tx_prep(&mchan->vc, &mdesc->vd, flags);
vd 519 drivers/dma/img-mdc-dma.c mdc_desc_free(&mdesc->vd);
vd 527 drivers/dma/img-mdc-dma.c struct virt_dma_desc *vd;
vd 531 drivers/dma/img-mdc-dma.c vd = vchan_next_desc(&mchan->vc);
vd 532 drivers/dma/img-mdc-dma.c if (!vd)
vd 535 drivers/dma/img-mdc-dma.c list_del(&vd->node);
vd 537 drivers/dma/img-mdc-dma.c mdesc = to_mdc_desc(&vd->tx);
vd 576 drivers/dma/img-mdc-dma.c struct virt_dma_desc *vd;
vd 589 drivers/dma/img-mdc-dma.c vd = vchan_find_desc(&mchan->vc, cookie);
vd 590 drivers/dma/img-mdc-dma.c if (vd) {
vd 591 drivers/dma/img-mdc-dma.c mdesc = to_mdc_desc(&vd->tx);
vd 593 drivers/dma/img-mdc-dma.c } else if (mchan->desc && mchan->desc->vd.tx.cookie == cookie) {
vd 703 drivers/dma/img-mdc-dma.c vchan_terminate_vdesc(&mchan->desc->vd);
vd 794 drivers/dma/img-mdc-dma.c vchan_cyclic_callback(&mdesc->vd);
vd 797 drivers/dma/img-mdc-dma.c vchan_cookie_complete(&mdesc->vd);
vd 318 drivers/dma/imx-sdma.c struct virt_dma_desc vd;
vd 748 drivers/dma/imx-sdma.c return container_of(t, struct sdma_desc, vd.tx);
vd 753 drivers/dma/imx-sdma.c struct virt_dma_desc *vd = vchan_next_desc(&sdmac->vc);
vd 758 drivers/dma/imx-sdma.c if (!vd) {
vd 762 drivers/dma/imx-sdma.c sdmac->desc = desc = to_sdma_desc(&vd->tx);
vd 768 drivers/dma/imx-sdma.c list_del(&vd->node);
vd 817 drivers/dma/imx-sdma.c dmaengine_desc_get_callback_invoke(&desc->vd.tx, NULL);
vd 872 drivers/dma/imx-sdma.c vchan_cookie_complete(&desc->vd);
vd 1250 drivers/dma/imx-sdma.c static void sdma_desc_free(struct virt_dma_desc *vd)
vd 1252 drivers/dma/imx-sdma.c struct sdma_desc *desc = container_of(vd, struct sdma_desc, vd);
vd 1437 drivers/dma/imx-sdma.c return vchan_tx_prep(&sdmac->vc, &desc->vd, flags);
vd 1515 drivers/dma/imx-sdma.c return vchan_tx_prep(&sdmac->vc, &desc->vd, flags);
vd 1586 drivers/dma/imx-sdma.c return vchan_tx_prep(&sdmac->vc, &desc->vd, flags);
vd 1654 drivers/dma/imx-sdma.c struct virt_dma_desc *vd;
vd 1663 drivers/dma/imx-sdma.c vd = vchan_find_desc(&sdmac->vc, cookie);
vd 1664 drivers/dma/imx-sdma.c if (vd) {
vd 1665 drivers/dma/imx-sdma.c desc = to_sdma_desc(&vd->tx);
vd 1671 drivers/dma/imx-sdma.c } else if (sdmac->desc && sdmac->desc->vd.tx.cookie == cookie) {
vd 71 drivers/dma/k3dma.c struct virt_dma_desc vd;
vd 233 drivers/dma/k3dma.c vchan_cookie_complete(&p->ds_run->vd);
vd 242 drivers/dma/k3dma.c vchan_cyclic_callback(&p->ds_run->vd);
vd 268 drivers/dma/k3dma.c struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
vd 280 drivers/dma/k3dma.c if (vd) {
vd 282 drivers/dma/k3dma.c container_of(vd, struct k3_dma_desc_sw, vd);
vd 287 drivers/dma/k3dma.c list_del(&ds->vd.node);
vd 381 drivers/dma/k3dma.c struct virt_dma_desc *vd;
vd 398 drivers/dma/k3dma.c vd = vchan_find_desc(&c->vc, cookie);
vd 399 drivers/dma/k3dma.c if (vd && !c->cyclic) {
vd 400 drivers/dma/k3dma.c bytes = container_of(vd, struct k3_dma_desc_sw, vd)->size;
vd 529 drivers/dma/k3dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 585 drivers/dma/k3dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 653 drivers/dma/k3dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 711 drivers/dma/k3dma.c static void k3_dma_free_desc(struct virt_dma_desc *vd)
vd 714 drivers/dma/k3dma.c container_of(vd, struct k3_dma_desc_sw, vd);
vd 715 drivers/dma/k3dma.c struct k3_dma_dev *d = to_k3_dma(vd->tx.chan->device);
vd 745 drivers/dma/k3dma.c vchan_terminate_vdesc(&p->ds_run->vd);
vd 80 drivers/dma/mediatek/mtk-cqdma.c struct virt_dma_desc vd;
vd 159 drivers/dma/mediatek/mtk-cqdma.c static struct mtk_cqdma_vdesc *to_cqdma_vdesc(struct virt_dma_desc *vd)
vd 161 drivers/dma/mediatek/mtk-cqdma.c return container_of(vd, struct mtk_cqdma_vdesc, vd);
vd 200 drivers/dma/mediatek/mtk-cqdma.c static void mtk_cqdma_vdesc_free(struct virt_dma_desc *vd)
vd 202 drivers/dma/mediatek/mtk-cqdma.c kfree(to_cqdma_vdesc(vd));
vd 268 drivers/dma/mediatek/mtk-cqdma.c struct virt_dma_desc *vd, *vd2;
vd 276 drivers/dma/mediatek/mtk-cqdma.c list_for_each_entry_safe(vd, vd2, &cvc->vc.desc_issued, node) {
vd 281 drivers/dma/mediatek/mtk-cqdma.c cvd = to_cqdma_vdesc(vd);
vd 291 drivers/dma/mediatek/mtk-cqdma.c list_del(&vd->node);
vd 339 drivers/dma/mediatek/mtk-cqdma.c vchan_cookie_complete(&cvd->parent->vd);
vd 372 drivers/dma/mediatek/mtk-cqdma.c dma_run_dependencies(&cvd->vd.tx);
vd 423 drivers/dma/mediatek/mtk-cqdma.c struct virt_dma_desc *vd;
vd 427 drivers/dma/mediatek/mtk-cqdma.c list_for_each_entry(vd, &cvc->pc->queue, node)
vd 428 drivers/dma/mediatek/mtk-cqdma.c if (vd->tx.cookie == cookie) {
vd 430 drivers/dma/mediatek/mtk-cqdma.c return vd;
vd 434 drivers/dma/mediatek/mtk-cqdma.c list_for_each_entry(vd, &cvc->vc.desc_issued, node)
vd 435 drivers/dma/mediatek/mtk-cqdma.c if (vd->tx.cookie == cookie)
vd 436 drivers/dma/mediatek/mtk-cqdma.c return vd;
vd 447 drivers/dma/mediatek/mtk-cqdma.c struct virt_dma_desc *vd;
vd 457 drivers/dma/mediatek/mtk-cqdma.c vd = mtk_cqdma_find_active_desc(c, cookie);
vd 460 drivers/dma/mediatek/mtk-cqdma.c if (vd) {
vd 461 drivers/dma/mediatek/mtk-cqdma.c cvd = to_cqdma_vdesc(vd);
vd 529 drivers/dma/mediatek/mtk-cqdma.c tx = vchan_tx_prep(to_virt_chan(c), &cvd[i]->vd, flags);
vd 548 drivers/dma/mediatek/mtk-cqdma.c return &cvd[0]->vd.tx;
vd 132 drivers/dma/mediatek/mtk-hsdma.c struct virt_dma_desc vd;
vd 148 drivers/dma/mediatek/mtk-hsdma.c struct virt_dma_desc *vd;
vd 259 drivers/dma/mediatek/mtk-hsdma.c static struct mtk_hsdma_vdesc *to_hsdma_vdesc(struct virt_dma_desc *vd)
vd 261 drivers/dma/mediatek/mtk-hsdma.c return container_of(vd, struct mtk_hsdma_vdesc, vd);
vd 300 drivers/dma/mediatek/mtk-hsdma.c static void mtk_hsdma_vdesc_free(struct virt_dma_desc *vd)
vd 302 drivers/dma/mediatek/mtk-hsdma.c kfree(container_of(vd, struct mtk_hsdma_vdesc, vd));
vd 460 drivers/dma/mediatek/mtk-hsdma.c ring->cb[ring->cur_tptr].vd = &hvd->vd;
vd 498 drivers/dma/mediatek/mtk-hsdma.c struct virt_dma_desc *vd, *vd2;
vd 503 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) {
vd 506 drivers/dma/mediatek/mtk-hsdma.c hvd = to_hsdma_vdesc(vd);
vd 529 drivers/dma/mediatek/mtk-hsdma.c list_move_tail(&vd->node, &hvc->desc_hw_processing);
vd 573 drivers/dma/mediatek/mtk-hsdma.c if (unlikely(!cb->vd)) {
vd 579 drivers/dma/mediatek/mtk-hsdma.c hvd = to_hsdma_vdesc(cb->vd);
vd 584 drivers/dma/mediatek/mtk-hsdma.c hvc = to_hsdma_vchan(cb->vd->tx.chan);
vd 589 drivers/dma/mediatek/mtk-hsdma.c list_del(&cb->vd->node);
vd 592 drivers/dma/mediatek/mtk-hsdma.c vchan_cookie_complete(cb->vd);
vd 604 drivers/dma/mediatek/mtk-hsdma.c cb->vd = 0;
vd 664 drivers/dma/mediatek/mtk-hsdma.c struct virt_dma_desc *vd;
vd 666 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry(vd, &hvc->desc_hw_processing, node)
vd 667 drivers/dma/mediatek/mtk-hsdma.c if (vd->tx.cookie == cookie)
vd 668 drivers/dma/mediatek/mtk-hsdma.c return vd;
vd 670 drivers/dma/mediatek/mtk-hsdma.c list_for_each_entry(vd, &hvc->vc.desc_issued, node)
vd 671 drivers/dma/mediatek/mtk-hsdma.c if (vd->tx.cookie == cookie)
vd 672 drivers/dma/mediatek/mtk-hsdma.c return vd;
vd 683 drivers/dma/mediatek/mtk-hsdma.c struct virt_dma_desc *vd;
vd 693 drivers/dma/mediatek/mtk-hsdma.c vd = mtk_hsdma_find_active_desc(c, cookie);
vd 696 drivers/dma/mediatek/mtk-hsdma.c if (vd) {
vd 697 drivers/dma/mediatek/mtk-hsdma.c hvd = to_hsdma_vdesc(vd);
vd 735 drivers/dma/mediatek/mtk-hsdma.c return vchan_tx_prep(to_virt_chan(c), &hvd->vd, flags);
vd 86 drivers/dma/mediatek/mtk-uart-apdma.c struct virt_dma_desc vd;
vd 118 drivers/dma/mediatek/mtk-uart-apdma.c return container_of(t, struct mtk_uart_apdma_desc, vd.tx);
vd 132 drivers/dma/mediatek/mtk-uart-apdma.c static void mtk_uart_apdma_desc_free(struct virt_dma_desc *vd)
vd 134 drivers/dma/mediatek/mtk-uart-apdma.c struct dma_chan *chan = vd->tx.chan;
vd 216 drivers/dma/mediatek/mtk-uart-apdma.c list_del(&d->vd.node);
vd 217 drivers/dma/mediatek/mtk-uart-apdma.c vchan_cookie_complete(&d->vd);
vd 249 drivers/dma/mediatek/mtk-uart-apdma.c list_del(&d->vd.node);
vd 250 drivers/dma/mediatek/mtk-uart-apdma.c vchan_cookie_complete(&d->vd);
vd 359 drivers/dma/mediatek/mtk-uart-apdma.c return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
vd 365 drivers/dma/mediatek/mtk-uart-apdma.c struct virt_dma_desc *vd;
vd 370 drivers/dma/mediatek/mtk-uart-apdma.c vd = vchan_next_desc(&c->vc);
vd 371 drivers/dma/mediatek/mtk-uart-apdma.c c->desc = to_mtk_uart_apdma_desc(&vd->tx);
vd 128 drivers/dma/moxart-dma.c struct virt_dma_desc vd;
vd 178 drivers/dma/moxart-dma.c return container_of(t, struct moxart_desc, vd.tx);
vd 181 drivers/dma/moxart-dma.c static void moxart_dma_desc_free(struct virt_dma_desc *vd)
vd 183 drivers/dma/moxart-dma.c kfree(container_of(vd, struct moxart_desc, vd));
vd 198 drivers/dma/moxart-dma.c moxart_dma_desc_free(&ch->desc->vd);
vd 329 drivers/dma/moxart-dma.c return vchan_tx_prep(&ch->vc, &d->vd, tx_flags);
vd 422 drivers/dma/moxart-dma.c struct virt_dma_desc *vd;
vd 424 drivers/dma/moxart-dma.c vd = vchan_next_desc(&ch->vc);
vd 426 drivers/dma/moxart-dma.c if (!vd) {
vd 431 drivers/dma/moxart-dma.c list_del(&vd->node);
vd 433 drivers/dma/moxart-dma.c ch->desc = to_moxart_dma_desc(&vd->tx);
vd 482 drivers/dma/moxart-dma.c struct virt_dma_desc *vd;
vd 493 drivers/dma/moxart-dma.c vd = vchan_find_desc(&ch->vc, cookie);
vd 494 drivers/dma/moxart-dma.c if (vd) {
vd 495 drivers/dma/moxart-dma.c d = to_moxart_dma_desc(&vd->tx);
vd 497 drivers/dma/moxart-dma.c } else if (ch->desc && ch->desc->vd.tx.cookie == cookie) {
vd 548 drivers/dma/moxart-dma.c vchan_cookie_complete(&ch->desc->vd);
vd 168 drivers/dma/owl-dma.c struct virt_dma_desc vd;
vd 295 drivers/dma/owl-dma.c return container_of(tx, struct owl_dma_txd, vd.tx);
vd 498 drivers/dma/owl-dma.c struct virt_dma_desc *vd = vchan_next_desc(&vchan->vc);
vd 500 drivers/dma/owl-dma.c struct owl_dma_txd *txd = to_owl_txd(&vd->tx);
vd 505 drivers/dma/owl-dma.c list_del(&vd->node);
vd 618 drivers/dma/owl-dma.c vchan_cookie_complete(&txd->vd);
vd 649 drivers/dma/owl-dma.c static void owl_dma_desc_free(struct virt_dma_desc *vd)
vd 651 drivers/dma/owl-dma.c struct owl_dma *od = to_owl_dma(vd->tx.chan->device);
vd 652 drivers/dma/owl-dma.c struct owl_dma_txd *txd = to_owl_txd(&vd->tx);
vd 670 drivers/dma/owl-dma.c owl_dma_desc_free(&vchan->txd->vd);
vd 769 drivers/dma/owl-dma.c struct virt_dma_desc *vd;
vd 781 drivers/dma/owl-dma.c vd = vchan_find_desc(&vchan->vc, cookie);
vd 782 drivers/dma/owl-dma.c if (vd) {
vd 783 drivers/dma/owl-dma.c txd = to_owl_txd(&vd->tx);
vd 867 drivers/dma/owl-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 931 drivers/dma/owl-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 993 drivers/dma/owl-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 84 drivers/dma/pxa_dma.c struct virt_dma_desc vd; /* Virtual descriptor */
vd 143 drivers/dma/pxa_dma.c container_of((_vd), struct pxad_desc_sw, vd)
vd 534 drivers/dma/pxa_dma.c static bool is_desc_completed(struct virt_dma_desc *vd)
vd 536 drivers/dma/pxa_dma.c struct pxad_desc_sw *sw_desc = to_pxad_sw_desc(vd);
vd 555 drivers/dma/pxa_dma.c struct virt_dma_desc *vd)
vd 571 drivers/dma/pxa_dma.c to_pxad_sw_desc(vd)->misaligned)
vd 576 drivers/dma/pxa_dma.c pxad_desc_chain(vd_last_issued, vd);
vd 577 drivers/dma/pxa_dma.c if (is_chan_running(chan) || is_desc_completed(vd))
vd 607 drivers/dma/pxa_dma.c struct virt_dma_desc *vd, *tmp;
vd 620 drivers/dma/pxa_dma.c list_for_each_entry_safe(vd, tmp, &chan->vc.desc_issued, node) {
vd 621 drivers/dma/pxa_dma.c vd_completed = is_desc_completed(vd);
vd 624 drivers/dma/pxa_dma.c __func__, vd, vd->tx.cookie, vd_completed,
vd 626 drivers/dma/pxa_dma.c last_started = vd->tx.cookie;
vd 627 drivers/dma/pxa_dma.c if (to_pxad_sw_desc(vd)->cyclic) {
vd 628 drivers/dma/pxa_dma.c vchan_cyclic_callback(vd);
vd 632 drivers/dma/pxa_dma.c list_del(&vd->node);
vd 633 drivers/dma/pxa_dma.c vchan_cookie_complete(vd);
vd 656 drivers/dma/pxa_dma.c vd = list_first_entry(&chan->vc.desc_issued,
vd 658 drivers/dma/pxa_dma.c pxad_launch_chan(chan, to_pxad_sw_desc(vd));
vd 720 drivers/dma/pxa_dma.c static void pxad_free_desc(struct virt_dma_desc *vd)
vd 724 drivers/dma/pxa_dma.c struct pxad_desc_sw *sw_desc = to_pxad_sw_desc(vd);
vd 772 drivers/dma/pxa_dma.c pxad_free_desc(&sw_desc->vd);
vd 781 drivers/dma/pxa_dma.c *vd = container_of(tx, struct virt_dma_desc, tx);
vd 785 drivers/dma/pxa_dma.c set_updater_desc(to_pxad_sw_desc(vd), tx->flags);
vd 790 drivers/dma/pxa_dma.c if (list_empty(&vc->desc_submitted) && pxad_try_hotchain(vc, vd)) {
vd 791 drivers/dma/pxa_dma.c list_move_tail(&vd->node, &vc->desc_issued);
vd 794 drivers/dma/pxa_dma.c __func__, vd, cookie);
vd 810 drivers/dma/pxa_dma.c if (chan->misaligned || !to_pxad_sw_desc(vd)->misaligned)
vd 811 drivers/dma/pxa_dma.c pxad_desc_chain(vd_chained, vd);
vd 817 drivers/dma/pxa_dma.c __func__, vd, cookie, vd_chained ? "cold" : "not");
vd 818 drivers/dma/pxa_dma.c list_move_tail(&vd->node, &vc->desc_submitted);
vd 819 drivers/dma/pxa_dma.c chan->misaligned |= to_pxad_sw_desc(vd)->misaligned;
vd 849 drivers/dma/pxa_dma.c pxad_tx_prep(struct virt_dma_chan *vc, struct virt_dma_desc *vd,
vd 855 drivers/dma/pxa_dma.c INIT_LIST_HEAD(&vd->node);
vd 856 drivers/dma/pxa_dma.c tx = vchan_tx_prep(vc, vd, tx_flags);
vd 860 drivers/dma/pxa_dma.c vc, vd, vd->tx.cookie,
vd 967 drivers/dma/pxa_dma.c return pxad_tx_prep(&chan->vc, &sw_desc->vd, flags);
vd 1017 drivers/dma/pxa_dma.c return pxad_tx_prep(&chan->vc, &sw_desc->vd, flags);
vd 1070 drivers/dma/pxa_dma.c return pxad_tx_prep(&chan->vc, &sw_desc->vd, flags);
vd 1089 drivers/dma/pxa_dma.c struct virt_dma_desc *vd = NULL;
vd 1100 drivers/dma/pxa_dma.c list_for_each_entry(vd, &head, node) {
vd 1103 drivers/dma/pxa_dma.c vd, vd->tx.cookie, is_desc_completed(vd));
vd 1124 drivers/dma/pxa_dma.c struct virt_dma_desc *vd = NULL;
vd 1141 drivers/dma/pxa_dma.c vd = vchan_find_desc(&chan->vc, cookie);
vd 1142 drivers/dma/pxa_dma.c if (!vd)
vd 1145 drivers/dma/pxa_dma.c sw_desc = to_pxad_sw_desc(vd);
vd 1158 drivers/dma/pxa_dma.c if (is_desc_completed(vd))
vd 1193 drivers/dma/pxa_dma.c __func__, vd, cookie, sw_desc, residue);
vd 63 drivers/dma/qcom/bam_dma.c struct virt_dma_desc vd;
vd 673 drivers/dma/qcom/bam_dma.c return vchan_tx_prep(&bchan->vc, &async_desc->vd, flags);
vd 718 drivers/dma/qcom/bam_dma.c list_add(&async_desc->vd.node, &bchan->vc.desc_issued);
vd 844 drivers/dma/qcom/bam_dma.c vchan_cookie_complete(&async_desc->vd);
vd 846 drivers/dma/qcom/bam_dma.c list_add(&async_desc->vd.node,
vd 912 drivers/dma/qcom/bam_dma.c struct virt_dma_desc *vd;
vd 926 drivers/dma/qcom/bam_dma.c vd = vchan_find_desc(&bchan->vc, cookie);
vd 927 drivers/dma/qcom/bam_dma.c if (vd) {
vd 928 drivers/dma/qcom/bam_dma.c residue = container_of(vd, struct bam_async_desc, vd)->length;
vd 931 drivers/dma/qcom/bam_dma.c if (async_desc->vd.tx.cookie != cookie)
vd 980 drivers/dma/qcom/bam_dma.c struct virt_dma_desc *vd = vchan_next_desc(&bchan->vc);
vd 992 drivers/dma/qcom/bam_dma.c if (!vd)
vd 999 drivers/dma/qcom/bam_dma.c while (vd && !IS_BUSY(bchan)) {
vd 1000 drivers/dma/qcom/bam_dma.c list_del(&vd->node);
vd 1002 drivers/dma/qcom/bam_dma.c async_desc = container_of(vd, struct bam_async_desc, vd);
vd 1026 drivers/dma/qcom/bam_dma.c vd = vchan_next_desc(&bchan->vc);
vd 1028 drivers/dma/qcom/bam_dma.c dmaengine_desc_get_callback(&async_desc->vd.tx, &cb);
vd 1038 drivers/dma/qcom/bam_dma.c if (((avail <= async_desc->xfer_len) || !vd ||
vd 1122 drivers/dma/qcom/bam_dma.c static void bam_dma_free_desc(struct virt_dma_desc *vd)
vd 1124 drivers/dma/qcom/bam_dma.c struct bam_async_desc *async_desc = container_of(vd,
vd 1125 drivers/dma/qcom/bam_dma.c struct bam_async_desc, vd);
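Across the bam_dma and pxa_dma entries above (and most drivers/dma entries below), the tx_status path repeats one idiom: check the cookie, then try vchan_find_desc() for a still-queued descriptor, else fall back to the running one. A schematic version of that idiom follows; the foo_* names and types are hypothetical, only the vchan_* and dma_cookie_* helpers are the real virt-dma/dmaengine API, and the fragment would compile only in-tree as part of a driver built on drivers/dma/virt-dma.h.

static enum dma_status foo_tx_status(struct dma_chan *chan,
				     dma_cookie_t cookie,
				     struct dma_tx_state *txstate)
{
	struct foo_chan *fchan = to_foo_chan(chan);	/* hypothetical */
	struct virt_dma_desc *vd;
	enum dma_status ret;
	unsigned long flags;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&fchan->vc.lock, flags);
	vd = vchan_find_desc(&fchan->vc, cookie);
	if (vd)
		/* still on the issued list: nothing transferred yet */
		dma_set_residue(txstate, to_foo_desc(&vd->tx)->size);
	else if (fchan->desc && fchan->desc->vd.tx.cookie == cookie)
		/* currently running: ask the hardware what is left */
		dma_set_residue(txstate, foo_hw_residue(fchan));
	spin_unlock_irqrestore(&fchan->vc.lock, flags);

	return ret;
}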
vd 167 drivers/dma/s3c24xx-dma.c struct virt_dma_desc vd;
vd 412 drivers/dma/s3c24xx-dma.c return container_of(tx, struct s3c24xx_txd, vd.tx);
vd 506 drivers/dma/s3c24xx-dma.c struct virt_dma_desc *vd = vchan_next_desc(&s3cchan->vc);
vd 507 drivers/dma/s3c24xx-dma.c struct s3c24xx_txd *txd = to_s3c24xx_txd(&vd->tx);
vd 509 drivers/dma/s3c24xx-dma.c list_del(&txd->vd.node);
vd 635 drivers/dma/s3c24xx-dma.c static void s3c24xx_dma_desc_free(struct virt_dma_desc *vd)
vd 637 drivers/dma/s3c24xx-dma.c struct s3c24xx_txd *txd = to_s3c24xx_txd(&vd->tx);
vd 638 drivers/dma/s3c24xx-dma.c struct s3c24xx_dma_chan *s3cchan = to_s3c24xx_dma_chan(vd->tx.chan);
vd 641 drivers/dma/s3c24xx-dma.c dma_descriptor_unmap(&vd->tx);
vd 677 drivers/dma/s3c24xx-dma.c vchan_cyclic_callback(&txd->vd);
vd 681 drivers/dma/s3c24xx-dma.c vchan_cookie_complete(&txd->vd);
vd 692 drivers/dma/s3c24xx-dma.c vchan_cyclic_callback(&txd->vd);
vd 732 drivers/dma/s3c24xx-dma.c vchan_terminate_vdesc(&s3cchan->at->vd);
vd 763 drivers/dma/s3c24xx-dma.c struct virt_dma_desc *vd;
vd 780 drivers/dma/s3c24xx-dma.c vd = vchan_find_desc(&s3cchan->vc, cookie);
vd 781 drivers/dma/s3c24xx-dma.c if (vd) {
vd 783 drivers/dma/s3c24xx-dma.c txd = to_s3c24xx_txd(&vd->tx);
vd 874 drivers/dma/s3c24xx-dma.c return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
vd 971 drivers/dma/s3c24xx-dma.c return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
vd 1060 drivers/dma/s3c24xx-dma.c return vchan_tx_prep(&s3cchan->vc, &txd->vd, flags);
vd 73 drivers/dma/sa11x0-dma.c struct virt_dma_desc vd;
vd 138 drivers/dma/sa11x0-dma.c struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
vd 140 drivers/dma/sa11x0-dma.c return vd ? container_of(vd, struct sa11x0_dma_desc, vd) : NULL;
vd 143 drivers/dma/sa11x0-dma.c static void sa11x0_dma_free_desc(struct virt_dma_desc *vd)
vd 145 drivers/dma/sa11x0-dma.c kfree(container_of(vd, struct sa11x0_dma_desc, vd));
vd 150 drivers/dma/sa11x0-dma.c list_del(&txd->vd.node);
vd 155 drivers/dma/sa11x0-dma.c p->num, &txd->vd, txd->vd.tx.cookie, txd->ddar);
vd 229 drivers/dma/sa11x0-dma.c vchan_cookie_complete(&txd->vd);
vd 238 drivers/dma/sa11x0-dma.c vchan_cyclic_callback(&txd->vd);
vd 423 drivers/dma/sa11x0-dma.c struct virt_dma_desc *vd;
vd 441 drivers/dma/sa11x0-dma.c vd = vchan_find_desc(&c->vc, cookie);
vd 442 drivers/dma/sa11x0-dma.c if (vd) {
vd 443 drivers/dma/sa11x0-dma.c state->residue = container_of(vd, struct sa11x0_dma_desc, vd)->size;
vd 450 drivers/dma/sa11x0-dma.c if (p->txd_done && p->txd_done->vd.tx.cookie == cookie)
vd 452 drivers/dma/sa11x0-dma.c else if (p->txd_load && p->txd_load->vd.tx.cookie == cookie)
vd 599 drivers/dma/sa11x0-dma.c &c->vc, &txd->vd, txd->size, txd->sglen);
vd 601 drivers/dma/sa11x0-dma.c return vchan_tx_prep(&c->vc, &txd->vd, flags);
vd 660 drivers/dma/sa11x0-dma.c return vchan_tx_prep(&c->vc, &txd->vd, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
vd 776 drivers/dma/sa11x0-dma.c list_add_tail(&p->txd_load->vd.node, &head);
vd 780 drivers/dma/sa11x0-dma.c list_add_tail(&p->txd_done->vd.node, &head);
vd 52 drivers/dma/sh/usb-dmac.c struct virt_dma_desc vd;
vd 63 drivers/dma/sh/usb-dmac.c #define to_usb_dmac_desc(vd) container_of(vd, struct usb_dmac_desc, vd)
vd 224 drivers/dma/sh/usb-dmac.c struct virt_dma_desc *vd;
vd 226 drivers/dma/sh/usb-dmac.c vd = vchan_next_desc(&chan->vc);
vd 227 drivers/dma/sh/usb-dmac.c if (!vd) {
vd 237 drivers/dma/sh/usb-dmac.c list_del(&vd->node);
vd 239 drivers/dma/sh/usb-dmac.c chan->desc = to_usb_dmac_desc(vd);
vd 442 drivers/dma/sh/usb-dmac.c return vchan_tx_prep(&uchan->vc, &desc->vd, dma_flags);
vd 507 drivers/dma/sh/usb-dmac.c struct virt_dma_desc *vd;
vd 512 drivers/dma/sh/usb-dmac.c vd = vchan_find_desc(&chan->vc, cookie);
vd 513 drivers/dma/sh/usb-dmac.c if (!vd)
vd 515 drivers/dma/sh/usb-dmac.c desc = to_usb_dmac_desc(vd);
vd 565 drivers/dma/sh/usb-dmac.c static void usb_dmac_virt_desc_free(struct virt_dma_desc *vd)
vd 567 drivers/dma/sh/usb-dmac.c struct usb_dmac_desc *desc = to_usb_dmac_desc(vd);
vd 568 drivers/dma/sh/usb-dmac.c struct usb_dmac_chan *chan = to_usb_dmac_chan(vd->tx.chan);
vd 588 drivers/dma/sh/usb-dmac.c desc->done_cookie = desc->vd.tx.cookie;
vd 589 drivers/dma/sh/usb-dmac.c vchan_cookie_complete(&desc->vd);
vd 185 drivers/dma/sprd-dma.c struct virt_dma_desc vd;
vd 215 drivers/dma/sprd-dma.c static void sprd_dma_free_desc(struct virt_dma_desc *vd);
vd 233 drivers/dma/sprd-dma.c static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
vd 235 drivers/dma/sprd-dma.c return container_of(vd, struct sprd_dma_desc, vd);
vd 511 drivers/dma/sprd-dma.c struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);
vd 513 drivers/dma/sprd-dma.c if (!vd)
vd 516 drivers/dma/sprd-dma.c list_del(&vd->node);
vd 517 drivers/dma/sprd-dma.c schan->cur_desc = to_sprd_dma_desc(vd);
vd 592 drivers/dma/sprd-dma.c vchan_cyclic_callback(&sdesc->vd);
vd 598 drivers/dma/sprd-dma.c vchan_cookie_complete(&sdesc->vd);
vd 622 drivers/dma/sprd-dma.c cur_vd = &schan->cur_desc->vd;
vd 639 drivers/dma/sprd-dma.c struct virt_dma_desc *vd;
vd 649 drivers/dma/sprd-dma.c vd = vchan_find_desc(&schan->vc, cookie);
vd 650 drivers/dma/sprd-dma.c if (vd) {
vd 651 drivers/dma/sprd-dma.c struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
vd 662 drivers/dma/sprd-dma.c } else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
vd 916 drivers/dma/sprd-dma.c return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
vd 1002 drivers/dma/sprd-dma.c return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
vd 1048 drivers/dma/sprd-dma.c cur_vd = &schan->cur_desc->vd;
vd 1062 drivers/dma/sprd-dma.c static void sprd_dma_free_desc(struct virt_dma_desc *vd)
vd 1064 drivers/dma/sprd-dma.c struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
vd 26 drivers/dma/st_fdma.c static struct st_fdma_desc *to_st_fdma_desc(struct virt_dma_desc *vd)
vd 28 drivers/dma/st_fdma.c return container_of(vd, struct st_fdma_desc, vdesc);
vd 584 drivers/dma/st_fdma.c struct virt_dma_desc *vd;
vd 593 drivers/dma/st_fdma.c vd = vchan_find_desc(&fchan->vchan, cookie);
vd 595 drivers/dma/st_fdma.c txstate->residue = st_fdma_desc_residue(fchan, vd, true);
vd 596 drivers/dma/st_fdma.c else if (vd)
vd 597 drivers/dma/st_fdma.c txstate->residue = st_fdma_desc_residue(fchan, vd, false);
vd 155 drivers/dma/sun4i-dma.c struct virt_dma_desc vd;
vd 182 drivers/dma/sun4i-dma.c static struct sun4i_dma_contract *to_sun4i_dma_contract(struct virt_dma_desc *vd)
vd 184 drivers/dma/sun4i-dma.c return container_of(vd, struct sun4i_dma_contract, vd);
vd 325 drivers/dma/sun4i-dma.c struct virt_dma_desc *vd;
vd 348 drivers/dma/sun4i-dma.c vd = vchan_next_desc(&vchan->vc);
vd 349 drivers/dma/sun4i-dma.c if (!vd) {
vd 356 drivers/dma/sun4i-dma.c contract = to_sun4i_dma_contract(vd);
vd 359 drivers/dma/sun4i-dma.c list_del(&contract->vd.node);
vd 360 drivers/dma/sun4i-dma.c vchan_cookie_complete(&contract->vd);
vd 595 drivers/dma/sun4i-dma.c static void sun4i_dma_free_contract(struct virt_dma_desc *vd)
vd 597 drivers/dma/sun4i-dma.c struct sun4i_dma_contract *contract = to_sun4i_dma_contract(vd);
vd 657 drivers/dma/sun4i-dma.c return vchan_tx_prep(&vchan->vc, &contract->vd, flags);
vd 763 drivers/dma/sun4i-dma.c return vchan_tx_prep(&vchan->vc, &contract->vd, flags);
vd 859 drivers/dma/sun4i-dma.c return vchan_tx_prep(&vchan->vc, &contract->vd, flags);
vd 945 drivers/dma/sun4i-dma.c struct virt_dma_desc *vd;
vd 955 drivers/dma/sun4i-dma.c vd = vchan_find_desc(&vchan->vc, cookie);
vd 956 drivers/dma/sun4i-dma.c if (!vd)
vd 958 drivers/dma/sun4i-dma.c contract = to_sun4i_dma_contract(vd);
vd 1060 drivers/dma/sun4i-dma.c vchan_cyclic_callback(&contract->vd);
vd 1074 drivers/dma/sun4i-dma.c vchan_cyclic_callback(&contract->vd);
vd 162 drivers/dma/sun6i-dma.c struct virt_dma_desc vd;
vd 223 drivers/dma/sun6i-dma.c return container_of(tx, struct sun6i_desc, vd.tx);
vd 401 drivers/dma/sun6i-dma.c static void sun6i_dma_free_desc(struct virt_dma_desc *vd)
vd 403 drivers/dma/sun6i-dma.c struct sun6i_desc *txd = to_sun6i_desc(&vd->tx);
vd 404 drivers/dma/sun6i-dma.c struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vd->tx.chan->device);
vd 558 drivers/dma/sun6i-dma.c vchan_cyclic_callback(&pchan->desc->vd);
vd 561 drivers/dma/sun6i-dma.c vchan_cookie_complete(&pchan->desc->vd);
vd 675 drivers/dma/sun6i-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 752 drivers/dma/sun6i-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 820 drivers/dma/sun6i-dma.c return vchan_tx_prep(&vchan->vc, &txd->vd, flags);
vd 901 drivers/dma/sun6i-dma.c struct virt_dma_desc *vd = &pchan->desc->vd;
vd 904 drivers/dma/sun6i-dma.c list_add_tail(&vd->node, &vc->desc_completed);
vd 934 drivers/dma/sun6i-dma.c struct virt_dma_desc *vd;
vd 946 drivers/dma/sun6i-dma.c vd = vchan_find_desc(&vchan->vc, cookie);
vd 947 drivers/dma/sun6i-dma.c txd = to_sun6i_desc(&vd->tx);
vd 949 drivers/dma/sun6i-dma.c if (vd) {
vd 120 drivers/dma/tegra210-adma.c struct virt_dma_desc vd;
vd 198 drivers/dma/tegra210-adma.c return container_of(td, struct tegra_adma_desc, vd.tx);
vd 206 drivers/dma/tegra210-adma.c static void tegra_adma_desc_free(struct virt_dma_desc *vd)
vd 208 drivers/dma/tegra210-adma.c kfree(container_of(vd, struct tegra_adma_desc, vd));
vd 353 drivers/dma/tegra210-adma.c struct virt_dma_desc *vd = vchan_next_desc(&tdc->vc);
vd 357 drivers/dma/tegra210-adma.c if (!vd)
vd 360 drivers/dma/tegra210-adma.c list_del(&vd->node);
vd 362 drivers/dma/tegra210-adma.c desc = to_tegra_adma_desc(&vd->tx);
vd 421 drivers/dma/tegra210-adma.c vchan_cyclic_callback(&tdc->desc->vd);
vd 513 drivers/dma/tegra210-adma.c struct virt_dma_desc *vd;
vd 524 drivers/dma/tegra210-adma.c vd = vchan_find_desc(&tdc->vc, cookie);
vd 525 drivers/dma/tegra210-adma.c if (vd) {
vd 526 drivers/dma/tegra210-adma.c desc = to_tegra_adma_desc(&vd->tx);
vd 528 drivers/dma/tegra210-adma.c } else if (tdc->desc && tdc->desc->vd.tx.cookie == cookie) {
vd 644 drivers/dma/tegra210-adma.c return vchan_tx_prep(&tdc->vc, &desc->vd, flags);
vd 90 drivers/dma/ti/omap-dma.c struct virt_dma_desc vd;
vd 223 drivers/dma/ti/omap-dma.c return container_of(t, struct omap_desc, vd.tx);
vd 226 drivers/dma/ti/omap-dma.c static void omap_dma_desc_free(struct virt_dma_desc *vd)
vd 228 drivers/dma/ti/omap-dma.c struct omap_desc *d = to_omap_dma_desc(&vd->tx);
vd 231 drivers/dma/ti/omap-dma.c struct omap_dmadev *od = to_omap_dma_dev(vd->tx.chan->device);
vd 531 drivers/dma/ti/omap-dma.c struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
vd 535 drivers/dma/ti/omap-dma.c if (!vd) {
vd 540 drivers/dma/ti/omap-dma.c list_del(&vd->node);
vd 542 drivers/dma/ti/omap-dma.c c->desc = d = to_omap_dma_desc(&vd->tx);
vd 585 drivers/dma/ti/omap-dma.c vchan_cyclic_callback(&d->vd);
vd 588 drivers/dma/ti/omap-dma.c vchan_cookie_complete(&d->vd);
vd 826 drivers/dma/ti/omap-dma.c if (c->desc && c->desc->vd.tx.cookie == cookie)
vd 844 drivers/dma/ti/omap-dma.c struct virt_dma_desc *vd = vchan_find_desc(&c->vc, cookie);
vd 846 drivers/dma/ti/omap-dma.c if (vd)
vd 848 drivers/dma/ti/omap-dma.c to_omap_dma_desc(&vd->tx));
vd 865 drivers/dma/ti/omap-dma.c vchan_cookie_complete(&d->vd);
vd 1064 drivers/dma/ti/omap-dma.c return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
vd 1165 drivers/dma/ti/omap-dma.c return vchan_tx_prep(&c->vc, &d->vd, flags);
vd 1211 drivers/dma/ti/omap-dma.c return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
vd 1294 drivers/dma/ti/omap-dma.c return vchan_tx_prep(&c->vc, &d->vd, flags);
vd 1328 drivers/dma/ti/omap-dma.c vchan_terminate_vdesc(&c->desc->vd);
vd 52 drivers/dma/uniphier-mdmac.c struct virt_dma_desc vd;
vd 81 drivers/dma/uniphier-mdmac.c to_uniphier_mdmac_desc(struct virt_dma_desc *vd)
vd 83 drivers/dma/uniphier-mdmac.c return container_of(vd, struct uniphier_mdmac_desc, vd);
vd 90 drivers/dma/uniphier-mdmac.c struct virt_dma_desc *vd;
vd 92 drivers/dma/uniphier-mdmac.c vd = vchan_next_desc(&mc->vc);
vd 93 drivers/dma/uniphier-mdmac.c if (!vd) {
vd 98 drivers/dma/uniphier-mdmac.c list_del(&vd->node);
vd 100 drivers/dma/uniphier-mdmac.c mc->md = to_uniphier_mdmac_desc(vd);
vd 210 drivers/dma/uniphier-mdmac.c vchan_cookie_complete(&md->vd);
vd 249 drivers/dma/uniphier-mdmac.c return vchan_tx_prep(vc, &md->vd, flags);
vd 263 drivers/dma/uniphier-mdmac.c vchan_terminate_vdesc(&mc->md->vd);
vd 286 drivers/dma/uniphier-mdmac.c struct virt_dma_desc *vd;
vd 304 drivers/dma/uniphier-mdmac.c if (mc->md && mc->md->vd.tx.cookie == cookie) {
vd 312 drivers/dma/uniphier-mdmac.c vd = vchan_find_desc(vc, cookie);
vd 313 drivers/dma/uniphier-mdmac.c if (vd)
vd 314 drivers/dma/uniphier-mdmac.c md = to_uniphier_mdmac_desc(vd);
vd 342 drivers/dma/uniphier-mdmac.c static void uniphier_mdmac_desc_free(struct virt_dma_desc *vd)
vd 344 drivers/dma/uniphier-mdmac.c kfree(to_uniphier_mdmac_desc(vd));
vd 22 drivers/dma/virt-dma.c struct virt_dma_desc *vd = to_virt_desc(tx);
vd 29 drivers/dma/virt-dma.c list_move_tail(&vd->node, &vc->desc_submitted);
vd 33 drivers/dma/virt-dma.c vc, vd, cookie);
vd 52 drivers/dma/virt-dma.c struct virt_dma_desc *vd = to_virt_desc(tx);
vd 56 drivers/dma/virt-dma.c list_del(&vd->node);
vd 60 drivers/dma/virt-dma.c vc, vd, vd->tx.cookie);
vd 61 drivers/dma/virt-dma.c vc->desc_free(vd);
vd 69 drivers/dma/virt-dma.c struct virt_dma_desc *vd;
vd 71 drivers/dma/virt-dma.c list_for_each_entry(vd, &vc->desc_issued, node)
vd 72 drivers/dma/virt-dma.c if (vd->tx.cookie == cookie)
vd 73 drivers/dma/virt-dma.c return vd;
vd 86 drivers/dma/virt-dma.c struct virt_dma_desc *vd, *_vd;
vd 92 drivers/dma/virt-dma.c vd = vc->cyclic;
vd 93 drivers/dma/virt-dma.c if (vd) {
vd 95 drivers/dma/virt-dma.c dmaengine_desc_get_callback(&vd->tx, &cb);
vd 101 drivers/dma/virt-dma.c dmaengine_desc_callback_invoke(&cb, &vd->tx_result);
vd 103 drivers/dma/virt-dma.c list_for_each_entry_safe(vd, _vd, &head, node) {
vd 104 drivers/dma/virt-dma.c dmaengine_desc_get_callback(&vd->tx, &cb);
vd 106 drivers/dma/virt-dma.c list_del(&vd->node);
vd 107 drivers/dma/virt-dma.c dmaengine_desc_callback_invoke(&cb, &vd->tx_result);
vd 108 drivers/dma/virt-dma.c vchan_vdesc_fini(vd);
vd 114 drivers/dma/virt-dma.c struct virt_dma_desc *vd, *_vd;
vd 116 drivers/dma/virt-dma.c list_for_each_entry_safe(vd, _vd, head, node) {
vd 117 drivers/dma/virt-dma.c if (dmaengine_desc_test_reuse(&vd->tx)) {
vd 118 drivers/dma/virt-dma.c list_move_tail(&vd->node, &vc->desc_allocated);
vd 120 drivers/dma/virt-dma.c dev_dbg(vc->chan.device->dev, "txd %p: freeing\n", vd);
vd 121 drivers/dma/virt-dma.c list_del(&vd->node);
vd 122 drivers/dma/virt-dma.c vc->desc_free(vd);
vd 57 drivers/dma/virt-dma.h struct virt_dma_desc *vd, unsigned long tx_flags)
vd 61 drivers/dma/virt-dma.h dma_async_tx_descriptor_init(&vd->tx, &vc->chan);
vd 62 drivers/dma/virt-dma.h vd->tx.flags = tx_flags;
vd 63 drivers/dma/virt-dma.h vd->tx.tx_submit = vchan_tx_submit;
vd 64 drivers/dma/virt-dma.h vd->tx.desc_free = vchan_tx_desc_free;
vd 66 drivers/dma/virt-dma.h vd->tx_result.result = DMA_TRANS_NOERROR;
vd 67 drivers/dma/virt-dma.h vd->tx_result.residue = 0;
vd 70 drivers/dma/virt-dma.h list_add_tail(&vd->node, &vc->desc_allocated);
vd 73 drivers/dma/virt-dma.h return &vd->tx;
vd 94 drivers/dma/virt-dma.h static inline void vchan_cookie_complete(struct virt_dma_desc *vd)
vd 96 drivers/dma/virt-dma.h struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
vd 99 drivers/dma/virt-dma.h cookie = vd->tx.cookie;
vd 100 drivers/dma/virt-dma.h dma_cookie_complete(&vd->tx);
vd 102 drivers/dma/virt-dma.h vd, cookie);
vd 103 drivers/dma/virt-dma.h list_add_tail(&vd->node, &vc->desc_completed);
vd 112 drivers/dma/virt-dma.h static inline void vchan_vdesc_fini(struct virt_dma_desc *vd)
vd 114 drivers/dma/virt-dma.h struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
vd 116 drivers/dma/virt-dma.h if (dmaengine_desc_test_reuse(&vd->tx))
vd 117 drivers/dma/virt-dma.h list_add(&vd->node, &vc->desc_allocated);
vd 119 drivers/dma/virt-dma.h vc->desc_free(vd);
vd 126 drivers/dma/virt-dma.h static inline void vchan_cyclic_callback(struct virt_dma_desc *vd)
vd 128 drivers/dma/virt-dma.h struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
vd 130 drivers/dma/virt-dma.h vc->cyclic = vd;
vd 140 drivers/dma/virt-dma.h static inline void vchan_terminate_vdesc(struct virt_dma_desc *vd)
vd 142 drivers/dma/virt-dma.h struct virt_dma_chan *vc = to_virt_chan(vd->tx.chan);
vd 148 drivers/dma/virt-dma.h vc->vd_terminated = vd;
vd 149 drivers/dma/virt-dma.h if (vc->cyclic == vd)
vd 186 drivers/dma/virt-dma.h struct virt_dma_desc *vd;
vd 192 drivers/dma/virt-dma.h list_for_each_entry(vd, &head, node)
vd 193 drivers/dma/virt-dma.h dmaengine_desc_clear_reuse(&vd->tx);
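The drivers/dma/virt-dma.{c,h} entries just above are the core that every drivers/dma listing in this index builds on. Since the index shows the helpers only line by line, here is a minimal sketch of how they compose: a driver embeds struct virt_dma_desc in its own descriptor, recovers it with container_of(), hands it to vchan_tx_prep() at prep time and to vchan_cookie_complete() from its completion interrupt. All bar_* names are hypothetical; the vchan_* calls are the real API, and the fragment compiles only in-tree against drivers/dma/virt-dma.h.

#include "virt-dma.h"

struct bar_desc {
	struct virt_dma_desc vd;	/* wraps the generic descriptor */
	size_t size;
};

static inline struct bar_desc *to_bar_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct bar_desc, vd);
}

/* vc->desc_free callback: virt-dma owns the lists, the driver the memory */
static void bar_desc_free(struct virt_dma_desc *vd)
{
	kfree(to_bar_desc(vd));
}

static struct dma_async_tx_descriptor *
bar_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		size_t len, unsigned long flags)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	struct bar_desc *d = kzalloc(sizeof(*d), GFP_NOWAIT);

	if (!d)
		return NULL;
	d->size = len;
	/* ...fill the hardware link list here... */
	return vchan_tx_prep(vc, &d->vd, flags);
}

static void bar_irq_done(struct virt_dma_chan *vc, struct bar_desc *d)
{
	/* moves d->vd to desc_completed and schedules the callback work */
	vchan_cookie_complete(&d->vd);
}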
vd 88 drivers/dma/zx_dma.c struct virt_dma_desc vd;
vd 190 drivers/dma/zx_dma.c struct virt_dma_desc *vd = vchan_next_desc(&c->vc);
vd 198 drivers/dma/zx_dma.c if (vd) {
vd 200 drivers/dma/zx_dma.c container_of(vd, struct zx_dma_desc_sw, vd);
vd 205 drivers/dma/zx_dma.c list_del(&ds->vd.node);
vd 292 drivers/dma/zx_dma.c vchan_cyclic_callback(&p->ds_run->vd);
vd 294 drivers/dma/zx_dma.c vchan_cookie_complete(&p->ds_run->vd);
vd 337 drivers/dma/zx_dma.c struct virt_dma_desc *vd;
vd 354 drivers/dma/zx_dma.c vd = vchan_find_desc(&c->vc, cookie);
vd 355 drivers/dma/zx_dma.c if (vd) {
vd 356 drivers/dma/zx_dma.c bytes = container_of(vd, struct zx_dma_desc_sw, vd)->size;
vd 542 drivers/dma/zx_dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 600 drivers/dma/zx_dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 643 drivers/dma/zx_dma.c return vchan_tx_prep(&c->vc, &ds->vd, flags);
vd 715 drivers/dma/zx_dma.c static void zx_dma_free_desc(struct virt_dma_desc *vd)
vd 718 drivers/dma/zx_dma.c container_of(vd, struct zx_dma_desc_sw, vd);
vd 719 drivers/dma/zx_dma.c struct zx_dma_dev *d = to_zx_dma(vd->tx.chan->device);
vd 156 drivers/firmware/efi/test/efi_test.c efi_guid_t vendor_guid, *vd = NULL;
vd 175 drivers/firmware/efi/test/efi_test.c vd = &vendor_guid;
vd 196 drivers/firmware/efi/test/efi_test.c status = efi.get_variable(name, vd, at, dz, data);
vd 421 drivers/firmware/efi/test/efi_test.c efi_guid_t *vd = NULL;
vd 444 drivers/firmware/efi/test/efi_test.c vd = &vendor_guid;
vd 471 drivers/firmware/efi/test/efi_test.c status = efi.get_next_variable(ns, name, vd);
vd 505 drivers/firmware/efi/test/efi_test.c if (vd) {
vd 506 drivers/firmware/efi/test/efi_test.c if (copy_to_user(getnextvariablename.vendor_guid, vd,
vd 105 drivers/gpu/drm/i2c/ch7006_mode.c #define __MODE(f, hd, vd, ht, vt, hsynp, vsynp, \
vd 108 drivers/gpu/drm/i2c/ch7006_mode.c .name = #hd "x" #vd, \
vd 117 drivers/gpu/drm/i2c/ch7006_mode.c .vdisplay = vd, \
vd 118 drivers/gpu/drm/i2c/ch7006_mode.c .vsync_start = vd + 10, \
vd 119 drivers/gpu/drm/i2c/ch7006_mode.c .vsync_end = vd + 26, \
vd 135 drivers/gpu/drm/i2c/ch7006_mode.c #define MODE(f, hd, vd, ht, vt, hsynp, vsynp, \
vd 137 drivers/gpu/drm/i2c/ch7006_mode.c __MODE(f, hd, vd, ht, vt, hsynp, vsynp, subc, scale, \
vd 138 drivers/gpu/drm/i2c/ch7006_mode.c scale_mask, norm_mask, hd, vd)
vd 8204 drivers/gpu/drm/i915/intel_pm.c const int vd = _pxvid_to_vd(pxvid);
vd 8205 drivers/gpu/drm/i915/intel_pm.c const int vm = vd - 1125;
vd 8210 drivers/gpu/drm/i915/intel_pm.c return vd;
vd 1334 drivers/media/pci/intel/ipu3/ipu3-cio2.c struct video_device *vd = container_of(link->sink->entity,
vd 1336 drivers/media/pci/intel/ipu3/ipu3-cio2.c struct cio2_queue *q = container_of(vd, struct cio2_queue, vdev);
vd 1337 drivers/media/pci/intel/ipu3/ipu3-cio2.c struct cio2_device *cio2 = video_get_drvdata(vd);
vd 1343 drivers/media/pci/intel/ipu3/ipu3-cio2.c "video node %s pad not connected\n", vd->name);
vd 54 drivers/media/pci/ttpci/budget-av.c struct video_device vd;
vd 1375 drivers/media/pci/ttpci/budget-av.c saa7146_unregister_device(&budget_av->vd, dev);
vd 1473 drivers/media/pci/ttpci/budget-av.c if ((err = saa7146_register_device(&budget_av->vd, dev, "knc1", VFL_TYPE_GRABBER))) {
vd 547 drivers/media/radio/radio-tea5777.c tea->vd = tea575x_radio;
vd 548 drivers/media/radio/radio-tea5777.c video_set_drvdata(&tea->vd, tea);
vd 550 drivers/media/radio/radio-tea5777.c strscpy(tea->vd.name, tea->v4l2_dev->name, sizeof(tea->vd.name));
vd 551 drivers/media/radio/radio-tea5777.c tea->vd.lock = &tea->mutex;
vd 552 drivers/media/radio/radio-tea5777.c tea->vd.v4l2_dev = tea->v4l2_dev;
vd 553 drivers/media/radio/radio-tea5777.c tea->vd.device_caps = V4L2_CAP_TUNER | V4L2_CAP_RADIO |
vd 557 drivers/media/radio/radio-tea5777.c tea->vd.fops = &tea->fops;
vd 559 drivers/media/radio/radio-tea5777.c tea->vd.ctrl_handler = &tea->ctrl_handler;
vd 571 drivers/media/radio/radio-tea5777.c res = video_register_device(&tea->vd, VFL_TYPE_RADIO, -1);
vd 574 drivers/media/radio/radio-tea5777.c v4l2_ctrl_handler_free(tea->vd.ctrl_handler);
vd 584 drivers/media/radio/radio-tea5777.c video_unregister_device(&tea->vd);
vd 585 drivers/media/radio/radio-tea5777.c v4l2_ctrl_handler_free(tea->vd.ctrl_handler);
vd 53 drivers/media/radio/radio-tea5777.h struct video_device vd; /* video device */
vd 522 drivers/media/radio/tea575x.c tea->vd = tea575x_radio;
vd 523 drivers/media/radio/tea575x.c video_set_drvdata(&tea->vd, tea);
vd 525 drivers/media/radio/tea575x.c strscpy(tea->vd.name, tea->v4l2_dev->name, sizeof(tea->vd.name));
vd 526 drivers/media/radio/tea575x.c tea->vd.lock = &tea->mutex;
vd 527 drivers/media/radio/tea575x.c tea->vd.v4l2_dev = tea->v4l2_dev;
vd 528 drivers/media/radio/tea575x.c tea->vd.device_caps = V4L2_CAP_TUNER | V4L2_CAP_RADIO;
vd 530 drivers/media/radio/tea575x.c tea->vd.device_caps |= V4L2_CAP_HW_FREQ_SEEK;
vd 533 drivers/media/radio/tea575x.c tea->vd.fops = &tea->fops;
vd 536 drivers/media/radio/tea575x.c v4l2_disable_ioctl(&tea->vd, VIDIOC_S_HW_FREQ_SEEK);
vd 539 drivers/media/radio/tea575x.c tea->vd.ctrl_handler = &tea->ctrl_handler;
vd 561 drivers/media/radio/tea575x.c retval = video_register_device(&tea->vd, VFL_TYPE_RADIO, tea->radio_nr);
vd 564 drivers/media/radio/tea575x.c v4l2_ctrl_handler_free(tea->vd.ctrl_handler);
vd 574 drivers/media/radio/tea575x.c video_unregister_device(&tea->vd);
vd 575 drivers/media/radio/tea575x.c v4l2_ctrl_handler_free(tea->vd.ctrl_handler);
vd 1230 drivers/media/usb/stkwebcam/stk-webcam.c static void stk_v4l_dev_release(struct video_device *vd)
vd 1232 drivers/media/usb/stkwebcam/stk-webcam.c struct stk_camera *dev = vdev_to_camera(vd);
drivers/misc/cxl/flash.c ret = update_property(dn, prop_name, vd, vd 163 drivers/misc/cxl/flash.c prop_data += vd; vd 60 drivers/misc/mic/vop/vop_main.c #define to_vopvdev(vd) container_of(vd, struct _vop_vdev, vdev) vd 180 drivers/video/fbdev/asiliantfb.c unsigned vd = p->var.yres; vd 208 drivers/video/fbdev/asiliantfb.c write_cr(0x12, (vd - 1) & 0xff); vd 209 drivers/video/fbdev/asiliantfb.c write_cr(0x31, ((vd - 1) & 0xf00) >> 8); vd 193 drivers/video/fbdev/matrox/matroxfb_misc.c unsigned int vd, vs, ve, vt, lc; vd 268 drivers/video/fbdev/matrox/matroxfb_misc.c vd = m->VDisplay - 1; vd 272 drivers/video/fbdev/matrox/matroxfb_misc.c lc = vd; vd 297 drivers/video/fbdev/matrox/matroxfb_misc.c ((vd & 0x400) >> 8) | /* disp end */ vd 298 drivers/video/fbdev/matrox/matroxfb_misc.c ((vd & 0xC00) >> 7) | /* vblanking start */ vd 312 drivers/video/fbdev/matrox/matroxfb_misc.c ((vd & 0x100) >> 7) | vd 314 drivers/video/fbdev/matrox/matroxfb_misc.c ((vd & 0x100) >> 5) | vd 317 drivers/video/fbdev/matrox/matroxfb_misc.c ((vd & 0x200) >> 3) | vd 320 drivers/video/fbdev/matrox/matroxfb_misc.c hw->CRTC[9] = ((vd & 0x200) >> 4) | vd 328 drivers/video/fbdev/matrox/matroxfb_misc.c hw->CRTC[18] = vd /* & 0xFF */; vd 331 drivers/video/fbdev/matrox/matroxfb_misc.c hw->CRTC[21] = vd /* & 0xFF */; vd 552 drivers/video/fbdev/tdfxfb.c u32 vd, vs, ve, vt, vbs, vbe; vd 589 drivers/video/fbdev/tdfxfb.c vd = (info->var.yres << 1) - 1; vd 590 drivers/video/fbdev/tdfxfb.c vs = vd + (info->var.lower_margin << 1); vd 597 drivers/video/fbdev/tdfxfb.c vd = info->var.yres - 1; vd 598 drivers/video/fbdev/tdfxfb.c vs = vd + info->var.lower_margin; vd 604 drivers/video/fbdev/tdfxfb.c vbs = vd; vd 651 drivers/video/fbdev/tdfxfb.c ((vd & 0x200) >> 3) | vd 655 drivers/video/fbdev/tdfxfb.c ((vd & 0x100) >> 7) | vd 660 drivers/video/fbdev/tdfxfb.c reg.crt[0x12] = vd; vd 675 drivers/video/fbdev/tdfxfb.c ((vd & 0x400) >> 8) | vd 134 include/drm/drm_modes.h #define DRM_MODE(nm, t, c, hd, hss, hse, ht, hsk, vd, vss, vse, vt, vs, f) \ vd 137 include/drm/drm_modes.h .htotal = (ht), .hskew = (hsk), .vdisplay = (vd), \ vd 151 include/drm/drm_modes.h #define DRM_SIMPLE_MODE(hd, vd, hd_mm, vd_mm) \ vd 154 include/drm/drm_modes.h .htotal = (hd), .vdisplay = (vd), .vsync_start = (vd), \ vd 155 include/drm/drm_modes.h .vsync_end = (vd), .vtotal = (vd), .width_mm = (hd_mm), \ vd 39 include/media/drv-intf/tea575x.h struct video_device vd; /* video device */ vd 68 include/sound/control.h struct snd_kcontrol_volatile vd[0]; /* volatile data */ vd 9 include/vdso/helpers.h static __always_inline u32 vdso_read_begin(const struct vdso_data *vd) vd 13 include/vdso/helpers.h while ((seq = READ_ONCE(vd->seq)) & 1) vd 20 include/vdso/helpers.h static __always_inline u32 vdso_read_retry(const struct vdso_data *vd, vd 26 include/vdso/helpers.h seq = READ_ONCE(vd->seq); vd 30 include/vdso/helpers.h static __always_inline void vdso_write_begin(struct vdso_data *vd) vd 37 include/vdso/helpers.h WRITE_ONCE(vd[CS_HRES_COARSE].seq, vd[CS_HRES_COARSE].seq + 1); vd 38 include/vdso/helpers.h WRITE_ONCE(vd[CS_RAW].seq, vd[CS_RAW].seq + 1); vd 42 include/vdso/helpers.h static __always_inline void vdso_write_end(struct vdso_data *vd) vd 50 include/vdso/helpers.h WRITE_ONCE(vd[CS_HRES_COARSE].seq, vd[CS_HRES_COARSE].seq + 1); vd 51 include/vdso/helpers.h WRITE_ONCE(vd[CS_RAW].seq, vd[CS_RAW].seq + 1); vd 2104 kernel/trace/trace_events_filter.c #define DATA_REC(m, va, vb, vc, vd, ve, vf, vg, vh, nvisit) \ vd 2107 kernel/trace/trace_events_filter.c .rec = { .a = va, .b = 
vd 2104 kernel/trace/trace_events_filter.c #define DATA_REC(m, va, vb, vc, vd, ve, vf, vg, vh, nvisit) \
vd 2107 kernel/trace/trace_events_filter.c .rec = { .a = va, .b = vb, .c = vc, .d = vd, \
vd 41 lib/vdso/gettimeofday.c static int do_hres(const struct vdso_data *vd, clockid_t clk,
vd 44 lib/vdso/gettimeofday.c const struct vdso_timestamp *vdso_ts = &vd->basetime[clk];
vd 49 lib/vdso/gettimeofday.c seq = vdso_read_begin(vd);
vd 50 lib/vdso/gettimeofday.c cycles = __arch_get_hw_counter(vd->clock_mode);
vd 52 lib/vdso/gettimeofday.c last = vd->cycle_last;
vd 56 lib/vdso/gettimeofday.c ns += vdso_calc_delta(cycles, last, vd->mask, vd->mult);
vd 57 lib/vdso/gettimeofday.c ns >>= vd->shift;
vd 59 lib/vdso/gettimeofday.c } while (unlikely(vdso_read_retry(vd, seq)));
vd 71 lib/vdso/gettimeofday.c static void do_coarse(const struct vdso_data *vd, clockid_t clk,
vd 74 lib/vdso/gettimeofday.c const struct vdso_timestamp *vdso_ts = &vd->basetime[clk];
vd 78 lib/vdso/gettimeofday.c seq = vdso_read_begin(vd);
vd 81 lib/vdso/gettimeofday.c } while (unlikely(vdso_read_retry(vd, seq)));
vd 87 lib/vdso/gettimeofday.c const struct vdso_data *vd = __arch_get_vdso_data();
vd 100 lib/vdso/gettimeofday.c return do_hres(&vd[CS_HRES_COARSE], clock, ts);
vd 102 lib/vdso/gettimeofday.c do_coarse(&vd[CS_HRES_COARSE], clock, ts);
vd 105 lib/vdso/gettimeofday.c return do_hres(&vd[CS_RAW], clock, ts);
vd 146 lib/vdso/gettimeofday.c const struct vdso_data *vd = __arch_get_vdso_data();
vd 151 lib/vdso/gettimeofday.c if (do_hres(&vd[CS_HRES_COARSE], CLOCK_REALTIME, &ts))
vd 159 lib/vdso/gettimeofday.c tz->tz_minuteswest = vd[CS_HRES_COARSE].tz_minuteswest;
vd 160 lib/vdso/gettimeofday.c tz->tz_dsttime = vd[CS_HRES_COARSE].tz_dsttime;
vd 169 lib/vdso/gettimeofday.c const struct vdso_data *vd = __arch_get_vdso_data();
vd 170 lib/vdso/gettimeofday.c time_t t = READ_ONCE(vd[CS_HRES_COARSE].basetime[CLOCK_REALTIME].sec);
vd 183 lib/vdso/gettimeofday.c const struct vdso_data *vd = __arch_get_vdso_data();
vd 192 lib/vdso/gettimeofday.c hrtimer_res = READ_ONCE(vd[CS_HRES_COARSE].hrtimer_res);
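Inside that read loop, do_hres() turns a raw counter delta into nanoseconds with the mask/mult/shift arithmetic visible in the entries for lines 56 and 57 above. A self-contained sketch of that conversion step; the constants in main() are invented for illustration (a 24 MHz counter with a shift of 16), and the real per-clocksource values live in struct vdso_data.

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

/* Convert a counter delta to nanoseconds the way the listing's
 * ns += vdso_calc_delta(cycles, last, mask, mult); ns >>= shift;
 * sequence does. base_ns must already be left-shifted by 'shift',
 * matching how the base time is stored on the write side. */
static uint64_t cycles_to_ns(uint64_t cycles, uint64_t last, uint64_t mask,
			     uint32_t mult, uint32_t shift, uint64_t base_ns)
{
	uint64_t delta = (cycles - last) & mask; /* wrap-safe on short counters */

	return (base_ns + delta * mult) >> shift;
}

int main(void)
{
	/* 24 MHz counter: ~41.67 ns/cycle, so mult = 41.67 * 2^16 ~= 2730667.
	 * 1024 cycles should come out near 42667 ns. */
	printf("%" PRIu64 " ns\n",
	       cycles_to_ns(1024, 0, ~0ULL, 2730667, 16, 0));
	return 0;
}

Keeping base_ns pre-shifted matches the listing's order of operations, where ns accumulates in shifted units and the single ns >>= vd->shift happens at the end.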
vd 120 sound/core/control.c if (control->vd[idx].owner == ctl)
vd 121 sound/core/control.c control->vd[idx].owner = NULL;
vd 204 sound/core/control.c *kctl = kzalloc(struct_size(*kctl, vd, count), GFP_KERNEL);
vd 209 sound/core/control.c (*kctl)->vd[idx].access = access;
vd 210 sound/core/control.c (*kctl)->vd[idx].owner = file;
vd 528 sound/core/control.c if (!(kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_USER)) {
vd 533 sound/core/control.c if (kctl->vd[idx].owner != NULL && kctl->vd[idx].owner != file) {
vd 562 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 573 sound/core/control.c vd = &kctl->vd[index_offset];
vd 576 sound/core/control.c if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE))
vd 578 sound/core/control.c vd->access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 580 sound/core/control.c if (vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE)
vd 582 sound/core/control.c vd->access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 796 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 813 sound/core/control.c vd = &kctl->vd[index_offset];
vd 815 sound/core/control.c info->access = vd->access;
vd 816 sound/core/control.c if (vd->owner) {
vd 818 sound/core/control.c if (vd->owner == ctl)
vd 820 sound/core/control.c info->owner = pid_vnr(vd->owner->pid);
vd 852 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 860 sound/core/control.c vd = &kctl->vd[index_offset];
vd 861 sound/core/control.c if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_READ) || kctl->get == NULL)
vd 899 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 908 sound/core/control.c vd = &kctl->vd[index_offset];
vd 909 sound/core/control.c if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_WRITE) || kctl->put == NULL ||
vd 910 sound/core/control.c (file && vd->owner && vd->owner != file)) {
vd 962 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 972 sound/core/control.c vd = &kctl->vd[snd_ctl_get_ioff(kctl, &id)];
vd 973 sound/core/control.c if (vd->owner != NULL)
vd 976 sound/core/control.c vd->owner = file;
vd 990 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 1000 sound/core/control.c vd = &kctl->vd[snd_ctl_get_ioff(kctl, &id)];
vd 1001 sound/core/control.c if (vd->owner == NULL)
vd 1003 sound/core/control.c else if (vd->owner != file)
vd 1006 sound/core/control.c vd->owner = NULL;
vd 1117 sound/core/control.c kctl->vd[i].access |= SNDRV_CTL_ELEM_ACCESS_TLV_READ;
vd 1419 sound/core/control.c struct snd_kcontrol_volatile *vd = &kctl->vd[snd_ctl_get_ioff(kctl, id)];
vd 1424 sound/core/control.c if (op_flag == pairs[i].op && (vd->access & pairs[i].perm))
vd 1435 sound/core/control.c vd->owner != NULL && vd->owner != file)
vd 1444 sound/core/control.c struct snd_kcontrol_volatile *vd = &kctl->vd[snd_ctl_get_ioff(kctl, id)];
vd 1447 sound/core/control.c if (!(vd->access & SNDRV_CTL_ELEM_ACCESS_TLV_READ))
vd 1472 sound/core/control.c struct snd_kcontrol_volatile *vd;
vd 1494 sound/core/control.c vd = &kctl->vd[snd_ctl_get_ioff(kctl, &id)];
vd 1496 sound/core/control.c if (vd->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
vd 258 sound/core/vmaster.c srec = kzalloc(struct_size(srec, slave.vd, slave->count),
vd 264 sound/core/vmaster.c memcpy(srec->slave.vd, slave->vd, slave->count * sizeof(*slave->vd));
vd 272 sound/core/vmaster.c if (slave->vd[0].access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK)
vd 365 sound/core/vmaster.c memcpy(sctl->vd, slave->slave.vd,
vd 366 sound/core/vmaster.c sctl->count * sizeof(*sctl->vd));
vd 425 sound/core/vmaster.c kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_TLV_READ;
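The two kzalloc(struct_size(...)) entries above (sound/core/control.c line 204 and sound/core/vmaster.c line 258) size one allocation as a header plus count trailing snd_kcontrol_volatile elements; the include/sound/control.h entry earlier shows the member declared as the old-style zero-length array vd[0]. A userspace model of that flexible-array pattern, with simplified stand-in types rather than the ALSA structs; note the kernel's struct_size() additionally saturates on overflow, which the bare arithmetic here does not.

#include <stdlib.h>

struct volatile_data {
	unsigned int access;
	void *owner;
};

struct kcontrol_model {
	unsigned int count;
	struct volatile_data vd[]; /* flexible array member */
};

/* Userspace equivalent of kzalloc(struct_size(k, vd, count), GFP_KERNEL):
 * one zeroed block holding the header plus count vd[] elements, so
 * k->vd[0] .. k->vd[count - 1] are valid without a second allocation. */
static struct kcontrol_model *kcontrol_alloc(unsigned int count)
{
	struct kcontrol_model *k;

	k = calloc(1, sizeof(*k) + count * sizeof(k->vd[0]));
	if (k)
		k->count = count;
	return k;
}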
vd 844 sound/drivers/dummy.c dummy->cd_volume_ctl->vd[0].access &=
vd 846 sound/drivers/dummy.c dummy->cd_switch_ctl->vd[0].access &=
vd 849 sound/drivers/dummy.c dummy->cd_volume_ctl->vd[0].access |=
vd 851 sound/drivers/dummy.c dummy->cd_switch_ctl->vd[0].access |=
vd 842 sound/hda/hdmi_chmap.c kctl->vd[i].access |= SNDRV_CTL_ELEM_ACCESS_WRITE;
vd 559 sound/i2c/cs8427.c chip->playback.pcm_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1231 sound/pci/ac97/ac97_codec.c kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_TLV_READ;
vd 118 sound/pci/au88x0/au88x0_pcm.c kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 120 sound/pci/au88x0/au88x0_pcm.c kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1145 sound/pci/cmipci.c ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1174 sound/pci/cmipci.c ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1048 sound/pci/emu10k1/emupcm.c kctl->vd[idx].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1050 sound/pci/emu10k1/emupcm.c kctl->vd[idx].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1868 sound/pci/hda/hda_codec.c if (kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
vd 1877 sound/pci/hda/hda_codec.c } else if (kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_TLV_READ)
vd 36 sound/pci/ice1712/wm8776.c struct snd_kcontrol_volatile *vd;
vd 47 sound/pci/ice1712/wm8776.c vd = &kctl->vd[index_offset];
vd 49 sound/pci/ice1712/wm8776.c vd->access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 51 sound/pci/ice1712/wm8776.c vd->access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 178 sound/pci/oxygen/oxygen_pcm.c chip->controls[CONTROL_SPDIF_PCM]->vd[0].access &=
vd 227 sound/pci/oxygen/oxygen_pcm.c chip->controls[CONTROL_SPDIF_PCM]->vd[0].access |=
vd 879 sound/pci/oxygen/xonar_wm87x6.c if ((ctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_INACTIVE) != access) {
vd 880 sound/pci/oxygen/xonar_wm87x6.c ctl->vd[0].access ^= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 886 sound/pci/rme32.c rme32->spdif_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1010 sound/pci/rme32.c rme32->spdif_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1210 sound/pci/rme96.c rme96->spdif_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1330 sound/pci/rme96.c rme96->spdif_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 4481 sound/pci/rme9652/hdsp.c hdsp->spdif_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 4500 sound/pci/rme9652/hdsp.c hdsp->spdif_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 4650 sound/pci/rme9652/hdspm.c hdspm->playback_mixer_ctls[i]->vd[0].access =
vd 4655 sound/pci/rme9652/hdspm.c hdspm->playback_mixer_ctls[i]->vd[0].access =
vd 2317 sound/pci/rme9652/rme9652.c rme9652->spdif_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 2334 sound/pci/rme9652/rme9652.c rme9652->spdif_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1914 sound/pci/trident/trident_main.c trident->spdif_pcm_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1953 sound/pci/trident/trident_main.c trident->spdif_pcm_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 2887 sound/pci/trident/trident_main.c kctl->vd[num].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 2889 sound/pci/trident/trident_main.c kctl->vd[num].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1257 sound/pci/via82xx.c chip->dxs_controls[stream]->vd[0].access &=
vd 1347 sound/pci/via82xx.c chip->dxs_controls[stream]->vd[0].access |=
vd 401 sound/pci/ymfpci/ymfpci_main.c kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 680 sound/pci/ymfpci/ymfpci_main.c kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 981 sound/pci/ymfpci/ymfpci_main.c chip->spdif_pcm_ctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1080 sound/pci/ymfpci/ymfpci_main.c chip->spdif_pcm_ctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 139 sound/soc/codecs/sigmadsp.c if (!(kcontrol->vd[0].access & SNDRV_CTL_ELEM_ACCESS_INACTIVE))
vd 660 sound/soc/codecs/sigmadsp.c struct snd_kcontrol_volatile *vd;
vd 674 sound/soc/codecs/sigmadsp.c vd = &ctrl->kcontrol->vd[0];
vd 675 sound/soc/codecs/sigmadsp.c if (active == (bool)(vd->access & SNDRV_CTL_ELEM_ACCESS_INACTIVE)) {
vd 676 sound/soc/codecs/sigmadsp.c vd->access ^= SNDRV_CTL_ELEM_ACCESS_INACTIVE;
vd 1110 sound/usb/midi.c ctl->vd[0].access |=
vd 1127 sound/usb/midi.c ctl->vd[0].access &=
vd 1318 sound/usb/mixer.c kcontrol->vd[0].access &=
vd 1689 sound/usb/mixer.c kctl->vd[0].access |=
vd 98 sound/usb/mixer_quirks.c kctl->vd[0].access |=
vd 2350 sound/usb/mixer_quirks.c kctl->vd[0].access |= SNDRV_CTL_ELEM_ACCESS_TLV_READ;
vd 2351 sound/usb/mixer_quirks.c kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK;
vd 2358 sound/usb/mixer_quirks.c kctl->vd[0].access &= ~SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK;
vd 1161 sound/usb/mixer_scarlett_gen2.c private->vol_ctls[index]->vd[0].access &=
vd 1164 sound/usb/mixer_scarlett_gen2.c private->vol_ctls[index]->vd[0].access |=
vd 1406 sound/usb/mixer_scarlett_gen2.c private->vol_ctls[i]->vd[0].access &=
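Nearly every sound/ entry in this run is one idiom: set SNDRV_CTL_ELEM_ACCESS_INACTIVE in vd[0].access while the hardware owns the setting (typically while an S/PDIF or PCM stream is open) and clear it again on close, so user space sees the control grayed out. The xonar_wm87x6.c and sigmadsp.c entries flip the bit with ^= only after checking that it actually changes. A minimal model of that toggle; the flag value and struct are stand-ins, not the ALSA definitions.

#include <stdbool.h>

#define ACCESS_INACTIVE (1u << 8) /* stand-in for SNDRV_CTL_ELEM_ACCESS_INACTIVE */

struct volatile_data {
	unsigned int access;
};

/* Set or clear the inactive bit; returns true only on a real
 * transition, mirroring drivers that check before the ^= flip. */
static bool control_set_inactive(struct volatile_data *vd, bool inactive)
{
	bool was_inactive = vd->access & ACCESS_INACTIVE;

	if (was_inactive == inactive)
		return false;
	vd->access ^= ACCESS_INACTIVE; /* same flip as the xonar_wm87x6.c entry */
	return true;
}

Returning whether the bit flipped matches how those drivers emit a ctl notification to user space only when the access mask really changed.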